From 4b3af40596e12c8512f24532d93fd620a7ce37b4 Mon Sep 17 00:00:00 2001 From: itdependsnetworks Date: Tue, 16 Nov 2021 22:39:00 -0500 Subject: [PATCH 01/36] Create custom compliance feature Create custom compliance feature --- README.md | 1 + development/nautobot_config.py | 1 + docs/installation.md | 1 + docs/navigating-compliance-custom.md | 140 ++++++++++++++ nautobot_golden_config/__init__.py | 1 + nautobot_golden_config/choices.py | 2 + nautobot_golden_config/models.py | 180 ++++++++++++++---- .../nornir_plays/config_backup.py | 1 - .../nornir_plays/config_compliance.py | 2 +- nautobot_golden_config/tests/test_graphql.py | 2 +- .../test_config_compliance.py | 4 +- 11 files changed, 289 insertions(+), 46 deletions(-) create mode 100644 docs/navigating-compliance-custom.md diff --git a/README.md b/README.md index 5f4f97aa..7a8e78fe 100644 --- a/README.md +++ b/README.md @@ -32,6 +32,7 @@ but not want to use other features, which is perfectly fine to do so. - [Navigating SoTAgg](./docs/navigating-sot-agg.md) - [Navigating Compliance](./docs/navigating-compliance.md) - [Navigating JSON Compliance](./docs/navigating-compliance-json.md) +- [Navigating Custom Compliance](./docs/navigating-compliance-custom.md) - [FAQ](./docs/FAQ.md) ## Screenshots diff --git a/development/nautobot_config.py b/development/nautobot_config.py index 4c75b82c..d3336593 100644 --- a/development/nautobot_config.py +++ b/development/nautobot_config.py @@ -151,5 +151,6 @@ "enable_intended": is_truthy(os.environ.get("ENABLE_INTENDED", True)), "enable_sotagg": is_truthy(os.environ.get("ENABLE_SOTAGG", True)), "sot_agg_transposer": os.environ.get("SOT_AGG_TRANSPOSER"), + # "get_custom_compliance": "my.custom_compliance.func", }, } diff --git a/docs/installation.md b/docs/installation.md index 422752a2..a8de7b00 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -35,6 +35,7 @@ PLUGINS_CONFIG = { "enable_sotagg": True, "sot_agg_transposer": None, "platform_slug_map": None, + # "get_custom_compliance": "my.custom_compliance.func" }, } diff --git a/docs/navigating-compliance-custom.md b/docs/navigating-compliance-custom.md new file mode 100644 index 00000000..f9ec4549 --- /dev/null +++ b/docs/navigating-compliance-custom.md @@ -0,0 +1,140 @@ +# Navigating Compliance With Custom Logic + +## Caveats + +- The compliance `rule` must be unique for the Custom `config-type`. +- The data provided can come from either setting via the API like JSON or via match_config like CLI. It is up to the operator to enforce. +- Does not make any accommodations for adding to git. +- Mixing/Matching string (or CLI type) and JSON type compliance rules is **NOT** supported. A device should only have compliance rules from one or the other, and it is up to the operator to enforce. + - Applying a `match_config` presumes it is CLI type and not having one presumes it is JSON type. +- If the developer of the `get_custom_compliance` is not cognizant, the UI experience of the details may not always be obvious what the issues are. + - As an example, if the developer simply returns a `True` or `False` into the missing or extra dictionary, it will not be obvious to the user. +- The developer is responsible for ensuring the proper data structure is maintained for the given rule. 

## Justification

While the maintainers believe that the proper way to provide configuration compliance is the opinionated solution provided, which compares intended
state vs actual state, we are conscious that this may not always be a viable solution for every organization. As an example, complicated configurations may
not be ready for intended state, but there are still several parts of the configuration you will need to ensure always exist, such as "making sure all BGP
peers have authentication configured."

Providing additional opinionated solutions is neither in line with the intention of the project nor feasible in a way that would work
for everyone. For those reasons, it was decided to create an extensible interface for developers/operators to create their own custom compliance logic.

Finally, it is understood that one of the key values provided by the Golden Config plugin is the visualization of compliance and quick access to the
tooling. Providing the `get_custom_compliance` function interface allows developers/operators to integrate their own compliance process
with the user experience provided by the plugin.

## The Interface

The contract provided to your custom function is as follows:

Inputs:
- The function is called with a single parameter called `obj`, so your function must be set to accept `obj` as a kwarg.
- The `obj` parameter is the `self` instance object of a `ConfigCompliance` model; review the documentation for all attributes of a `ConfigCompliance` instance, but the common ones are documented below.
  - `obj.actual` - The **actual** configuration parsed out by the `match_config` logic, or what was sent via the API.
  - `obj.intended` - The **intended** configuration parsed out by the `match_config` logic, or what was sent via the API.
  - `obj.device.platform.slug` - The platform slug name.
  - `obj.rule.config_ordered` - Describes whether or not the rule was configured to be ordered (such as an ACL) or unordered (such as SNMP servers).
  - `obj.rule` - The name of the rule.
  - `obj.rule.match_config` - The match_config text the rule was configured with.

Outputs:
- The function should return a single dictionary with the keys of `compliance`, `compliance_int`, `ordered`, `missing`, and `extra`.
- The `compliance` key should be a boolean, either True or False, which determines whether or not the config is compliant.
- The `compliance_int` key should be an integer, either 1 (when compliance is True) or 0 (when compliance is False). This is required to handle a counting use case where a boolean does not suffice.
- The `ordered` key should be a boolean, either True or False, which determines whether the config is both compliant and in the correct order.
- The `missing` key should be a string or JSON, empty when nothing is missing and the appropriate string or JSON data when configuration is missing.
- The `extra` key should be a string or JSON, empty when nothing is extra and the appropriate string or JSON data when there is extra configuration.

There is validation to ensure the data structure returned conforms to the above assertions.

The function, provided in string path format, must be installed in the same environment as Nautobot and the workers.

## Configuration

The path to the function is set in the `get_custom_compliance` configuration parameter. 
This string representation of the function must be importable in the Python environment used by Nautobot and the workers. It must be a callable function, not a class or other object type.

```python
PLUGINS_CONFIG = {
    "nautobot_golden_config": {
        "get_custom_compliance": "my.custom_compliance.custom_compliance_func"
    }
}
```

## Example

To provide boilerplate code for any future use case, the following is provided:

```python
def custom_compliance_func(obj):
    # Modify with actual logic, this would always presume compliant.
    compliance_int = 1
    compliance = True
    ordered = True
    missing = ""
    extra = ""
    return {
        "compliance": compliance,
        "compliance_int": compliance_int,
        "ordered": ordered,
        "missing": missing,
        "extra": extra,
    }
```

Below is an actual example. It takes a very direct approach of matching platform and rule type to a check; this can naturally be modified to apply whatever abstract logic one may wish to provide.

```python
# expected_actual_config = '''router bgp 400
#  no synchronization
#  bgp log-neighbor-changes
#  neighbor 70.70.70.70 remote-as 400
#  neighbor 70.70.70.70 password cisco
#  neighbor 70.70.70.70 update-source Loopback80
#  no auto-summary
# '''
import re

BGP_PATTERN = re.compile(r"\s*neighbor (?P<ip>\d+\.\d+\.\d+\.\d+) .*")
BGP_SECRET = re.compile(r"\s*neighbor (?P<ip>\d+\.\d+\.\d+\.\d+) password (\S+).*")


def custom_compliance_func(obj):
    if obj.rule == 'bgp' and obj.device.platform.slug == 'ios':
        actual_config = obj.actual
        neighbors = []
        secrets = []
        for line in actual_config.splitlines():
            # Collect every BGP neighbor IP seen in the actual configuration.
            match = BGP_PATTERN.search(line)
            if match:
                neighbors.append(match.group("ip"))
            # Collect only the neighbors that have a password configured.
            secret_match = BGP_SECRET.search(line)
            if secret_match:
                secrets.append(secret_match.group("ip"))
        neighbors = list(set(neighbors))
        secrets = list(set(secrets))
        if secrets != neighbors:
            compliance_int = 0
            compliance = False
            ordered = False
            missing = f"neighbors found: {str(neighbors)}\nneighbors with secrets found: {str(secrets)}"
            extra = ""
        else:
            compliance_int = 1
            compliance = True
            ordered = True
            missing = ""
            extra = ""
        return {
            "compliance": compliance,
            "compliance_int": compliance_int,
            "ordered": ordered,
            "missing": missing,
            "extra": extra,
        }
```

In the above example, one may observe that there is no reference to `obj.intended`; that is because this logic is not concerned with such information.
As the developer of such solutions, you may not require the intended configuration or other attributes, but be conscious of the user experience
implications. It may seem odd to some users to have a blank intended configuration while compliance is set to true, or to be given
instructions for fixes rather than configurations. 
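Because the contract is simply a Python callable, it can be exercised without a running Nautobot instance before it is wired into `get_custom_compliance`. The sketch below is not part of the plugin; it assumes the function lives at the `my.custom_compliance` path used in the configuration example, and it uses `types.SimpleNamespace` purely as a stand-in for the `ConfigCompliance` instance the plugin would normally pass in.

```python
"""A minimal, hypothetical smoke test for the example custom compliance function."""
from types import SimpleNamespace

# Assumed install path, matching the configuration example above.
from my.custom_compliance import custom_compliance_func

# Stand-in for the ConfigCompliance instance; only the attributes the function reads are mimicked.
fake_obj = SimpleNamespace(
    rule="bgp",
    device=SimpleNamespace(platform=SimpleNamespace(slug="ios")),
    actual=(
        "router bgp 400\n"
        " neighbor 70.70.70.70 remote-as 400\n"
        " neighbor 70.70.70.71 remote-as 400\n"
        " neighbor 70.70.70.71 password cisco\n"
    ),
)

result = custom_compliance_func(fake_obj)
# One neighbor is missing a password, so the rule should report non-compliance.
assert result["compliance"] is False
assert result["compliance_int"] == 0
assert "70.70.70.70" in result["missing"]
```

Running this with a plain `python` invocation (or under a test runner) gives a quick signal that the returned dictionary will satisfy the plugin's validation before the function is used in a compliance job.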
\ No newline at end of file diff --git a/nautobot_golden_config/__init__.py b/nautobot_golden_config/__init__.py index 820a8c3c..0993e49d 100644 --- a/nautobot_golden_config/__init__.py +++ b/nautobot_golden_config/__init__.py @@ -23,6 +23,7 @@ class GoldenConfig(PluginConfig): "per_feature_bar_width": 0.3, "per_feature_width": 13, "per_feature_height": 4, + "get_custom_compliance": None, } diff --git a/nautobot_golden_config/choices.py b/nautobot_golden_config/choices.py index 98ad7c47..55ec565c 100644 --- a/nautobot_golden_config/choices.py +++ b/nautobot_golden_config/choices.py @@ -7,8 +7,10 @@ class ComplianceRuleTypeChoice(ChoiceSet): TYPE_CLI = "cli" TYPE_JSON = "json" + TYPE_CUSTOM = "custom" CHOICES = ( (TYPE_CLI, "CLI"), (TYPE_JSON, "JSON"), + (TYPE_CUSTOM, "CUSTOM"), ) diff --git a/nautobot_golden_config/models.py b/nautobot_golden_config/models.py index 57e5df8b..fd2f45b1 100644 --- a/nautobot_golden_config/models.py +++ b/nautobot_golden_config/models.py @@ -1,12 +1,14 @@ """Django Models for tracking the configuration compliance per feature and device.""" import logging +import json from deepdiff import DeepDiff from django.db import models from django.core.exceptions import ValidationError from django.core.serializers.json import DjangoJSONEncoder from django.shortcuts import reverse +from django.utils.module_loading import import_string from graphene_django.settings import graphene_settings from graphql import get_default_backend from graphql.error import GraphQLSyntaxError @@ -20,18 +22,135 @@ from nautobot_golden_config.choices import ComplianceRuleTypeChoice from nautobot_golden_config.utilities.utils import get_platform +from nautobot_golden_config.utilities.constant import PLUGIN_CFG LOGGER = logging.getLogger(__name__) GRAPHQL_STR_START = "query ($device_id: ID!)" +ERROR_MSG = ( + "There was an issue with the data that was returned by your get_custom_compliance function. " + "This is a local issue that requires the attention of your systems administrator and not something " + "that can be fixed within the Golden Config plugin. " +) +MISSING_MSG = ( + ERROR_MSG + "Specifically the `{}` key was not found in value the get_custom_compliance function provided." +) +VALIDATION_MSG = ( + ERROR_MSG + "Specifically the key {} was expected to be of type(s) {} and the value of {} was not that type(s)." 
+) + -def null_to_empty(val): +def _is_jsonable(val): + """Check is value can be converted to json.""" + try: + json.dumps(val) + return True + except (TypeError, OverflowError): + return False + + +def _null_to_empty(val): """Convert to empty string if the value is currently null.""" if not val: return "" return val +def _get_cli_compliance(obj): + """This function performs the actual compliance for cli configuration.""" + feature = { + "ordered": obj.rule.config_ordered, + "name": obj.rule, + } + feature.update({"section": obj.rule.match_config.splitlines()}) + value = feature_compliance(feature, obj.actual, obj.intended, get_platform(obj.device.platform.slug)) + compliance = value["compliant"] + if compliance: + compliance_int = 1 + ordered = value["ordered_compliant"] + else: + compliance_int = 0 + ordered = value["ordered_compliant"] + missing = _null_to_empty(value["missing"]) + extra = _null_to_empty(value["extra"]) + return { + "compliance": compliance, + "compliance_int": compliance_int, + "ordered": ordered, + "missing": missing, + "extra": extra, + } + + +def _get_json_compliance(obj): + """This function performs the actual compliance for json serializable data.""" + + def _normalize_diff(diff, path_to_diff): + """Normalizes the diff to a list of keys and list indexes that have changed.""" + dictionary_items = list(diff.get(f"dictionary_item_{path_to_diff}", [])) + list_items = list(diff.get(f"iterable_item_{path_to_diff}", {}).keys()) + values_changed = list(diff.get("values_changed", {}).keys()) + type_changes = list(diff.get("type_changes", {}).keys()) + return dictionary_items + list_items + values_changed + type_changes + + diff = DeepDiff(obj.actual, obj.intended, ignore_order=obj.ordered, report_repetition=True) + if not diff: + compliance_int = 1 + compliance = True + ordered = True + missing = "" + extra = "" + else: + compliance_int = 0 + compliance = False + ordered = False + missing = _null_to_empty(_normalize_diff(diff, "added")) + extra = _null_to_empty(_normalize_diff(diff, "removed")) + + return { + "compliance": compliance, + "compliance_int": compliance_int, + "ordered": ordered, + "missing": missing, + "extra": extra, + } + + +def _verify_get_custom_compliance_data(compliance_details): + """This function verifies the data is as expected when a custom function is used.""" + for val in ["compliance", "compliance_int", "ordered", "missing", "extra"]: + try: + compliance_details[val] + except KeyError: + raise ValidationError(MISSING_MSG.format(val)) from KeyError + for val in ["compliance", "ordered"]: + if compliance_details[val] not in [True, False]: + raise ValidationError(VALIDATION_MSG.format(val, "Boolean", compliance_details[val])) + if compliance_details["compliance_int"] not in [0, 1]: + raise ValidationError(VALIDATION_MSG.format("compliance_int", "0 or 1", compliance_details["compliance_int"])) + for val in ["missing", "extra"]: + if not isinstance(compliance_details[val], str) and not _is_jsonable(compliance_details[val]): + raise ValidationError(VALIDATION_MSG.format(val, "String or Json", compliance_details[val])) + + +# The below maps the provided compliance types +FUNC_MAPPER = { + ComplianceRuleTypeChoice.TYPE_CLI: _get_cli_compliance, + ComplianceRuleTypeChoice.TYPE_JSON: _get_json_compliance, +} +# The below conditionally add the cusom provided compliance type +if PLUGIN_CFG.get("get_custom_compliance"): + try: + FUNC_MAPPER[ComplianceRuleTypeChoice.TYPE_CUSTOM] = import_string(PLUGIN_CFG["get_custom_compliance"]) + except Exception as error: # 
pylint: disable=broad-except + msg = ( + "There was an issue attempting to import the get_custom_compliance function of" + f"{PLUGIN_CFG['get_custom_compliance']}, this is expected with a local configuration issue " + "and not related to the Golden Configuration Plugin, please contact your system admin for further details" + ) + raise Exception(msg).with_traceback(error.__traceback__) + + @extras_features( "custom_fields", "custom_validators", @@ -199,46 +318,25 @@ def __str__(self): return f"{self.device} -> {self.rule} -> {self.compliance}" def save(self, *args, **kwargs): - """Performs the actual compliance check.""" - feature = { - "ordered": self.rule.config_ordered, - "name": self.rule, - } - if self.rule.config_type == ComplianceRuleTypeChoice.TYPE_JSON: - feature.update({"section": self.rule.match_config}) - - diff = DeepDiff(self.actual, self.intended, ignore_order=self.ordered, report_repetition=True) - if not diff: - self.compliance_int = 1 - self.compliance = True - self.missing = "" - self.extra = "" - else: - self.compliance_int = 0 - self.compliance = False - self.missing = null_to_empty(self._normalize_diff(diff, "added")) - self.extra = null_to_empty(self._normalize_diff(diff, "removed")) - else: - feature.update({"section": self.rule.match_config.splitlines()}) - value = feature_compliance(feature, self.actual, self.intended, get_platform(self.device.platform.slug)) - self.compliance = value["compliant"] - if self.compliance: - self.compliance_int = 1 - else: - self.compliance_int = 0 - self.ordered = value["ordered_compliant"] - self.missing = null_to_empty(value["missing"]) - self.extra = null_to_empty(value["extra"]) - super().save(*args, **kwargs) + """The actual configuration compliance happens here, but the details for actual compliance job would be found in FUNC_MAPPER.""" + if self.rule.config_type == ComplianceRuleTypeChoice.TYPE_CUSTOM and not FUNC_MAPPER.get( + ComplianceRuleTypeChoice.TYPE_CUSTOM + ): + raise ValidationError( + "Custom type provided, but no `get_custom_compliance` config set, please contact system admin." 
+ ) + + compliance_details = FUNC_MAPPER[self.rule.config_type](obj=self) + if self.rule.config_type == ComplianceRuleTypeChoice.TYPE_CUSTOM: + _verify_get_custom_compliance_data(compliance_details) + + self.compliance = compliance_details["compliance"] + self.compliance_int = compliance_details["compliance_int"] + self.ordered = compliance_details["ordered"] + self.missing = compliance_details["missing"] + self.extra = compliance_details["extra"] - @staticmethod - def _normalize_diff(diff, path_to_diff): - """Normalizes the diff to a list of keys and list indexes that have changed.""" - dictionary_items = list(diff.get(f"dictionary_item_{path_to_diff}", [])) - list_items = list(diff.get(f"iterable_item_{path_to_diff}", {}).keys()) - values_changed = list(diff.get("values_changed", {}).keys()) - type_changes = list(diff.get("type_changes", {}).keys()) - return dictionary_items + list_items + values_changed + type_changes + super().save(*args, **kwargs) @extras_features( @@ -581,4 +679,4 @@ def get_absolute_url(self): def __str__(self): """Return a simple string if model is called.""" - return self.name + return self.name \ No newline at end of file diff --git a/nautobot_golden_config/nornir_plays/config_backup.py b/nautobot_golden_config/nornir_plays/config_backup.py index 68f621dd..d051d64f 100644 --- a/nautobot_golden_config/nornir_plays/config_backup.py +++ b/nautobot_golden_config/nornir_plays/config_backup.py @@ -7,7 +7,6 @@ from nornir.core.task import Result, Task from nornir.core.plugins.inventory import InventoryPluginRegister -from nornir_nautobot.exceptions import NornirNautobotException from nornir_nautobot.plugins.tasks.dispatcher import dispatcher from nornir_nautobot.utils.logger import NornirLogger diff --git a/nautobot_golden_config/nornir_plays/config_compliance.py b/nautobot_golden_config/nornir_plays/config_compliance.py index 36b3e799..881be39b 100644 --- a/nautobot_golden_config/nornir_plays/config_compliance.py +++ b/nautobot_golden_config/nornir_plays/config_compliance.py @@ -35,7 +35,7 @@ def get_rules(): """A serializer of sorts to return rule mappings as a dictionary.""" # TODO: Review if creating a proper serializer is the way to go. 
rules = {} - for obj in ComplianceRule.objects.filter(config_type="cli"): + for obj in ComplianceRule.objects.exclude(match_config__exact=""): platform = str(obj.platform.slug) if not rules.get(platform): rules[platform] = [] diff --git a/nautobot_golden_config/tests/test_graphql.py b/nautobot_golden_config/tests/test_graphql.py index 6724258f..b8b3d42d 100644 --- a/nautobot_golden_config/tests/test_graphql.py +++ b/nautobot_golden_config/tests/test_graphql.py @@ -218,7 +218,7 @@ def test_query_config_compliance(self): "rule": {"feature": {"name": "aaa"}}, "intended": "aaa test", "missing": "", - "ordered": False, + "ordered": True, } ] } diff --git a/nautobot_golden_config/tests/test_nornir_plays/test_config_compliance.py b/nautobot_golden_config/tests/test_nornir_plays/test_config_compliance.py index 6a74f8b5..603e0754 100644 --- a/nautobot_golden_config/tests/test_nornir_plays/test_config_compliance.py +++ b/nautobot_golden_config/tests/test_nornir_plays/test_config_compliance.py @@ -15,9 +15,9 @@ def test_get_rules(self, mock_compliance_rule): mock_obj = Mock(**features) mock_obj.name = "test_name" mock_obj.platform = Mock(slug="test_slug") - mock_compliance_rule.objects.filter.return_value = [mock_obj] + mock_compliance_rule.objects.exclude.return_value = [mock_obj] features = get_rules() - mock_compliance_rule.objects.filter.assert_called_once() + mock_compliance_rule.objects.exclude.assert_called_once() self.assertEqual( features, {"test_slug": [{"obj": mock_obj, "ordered": "test_ordered", "section": ["aaa", "snmp"]}]} ) From 787453858c0b2908da9fe6f955664b21e5de5859 Mon Sep 17 00:00:00 2001 From: itdependsnetworks Date: Sat, 20 Nov 2021 11:00:26 -0500 Subject: [PATCH 02/36] run black --- nautobot_golden_config/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_golden_config/models.py b/nautobot_golden_config/models.py index fd2f45b1..9e755d5e 100644 --- a/nautobot_golden_config/models.py +++ b/nautobot_golden_config/models.py @@ -679,4 +679,4 @@ def get_absolute_url(self): def __str__(self): """Return a simple string if model is called.""" - return self.name \ No newline at end of file + return self.name From e0d6f703a479741f3271278487c17ca0c1516d9f Mon Sep 17 00:00:00 2001 From: itdependsnetworks Date: Sat, 27 Nov 2021 00:26:45 -0500 Subject: [PATCH 03/36] fix pylint issue --- nautobot_golden_config/models.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nautobot_golden_config/models.py b/nautobot_golden_config/models.py index 9e755d5e..cc4150cc 100644 --- a/nautobot_golden_config/models.py +++ b/nautobot_golden_config/models.py @@ -508,8 +508,8 @@ def clean(self): backend = get_default_backend() schema = graphene_settings.SCHEMA backend.document_from_string(schema, str(self.sot_agg_query)) - except GraphQLSyntaxError as error: - raise ValidationError(str(error)) # pylint: disable=raise-missing-from + except GraphQLSyntaxError as err: + raise ValidationError(str(err)) # pylint: disable=raise-missing-from LOGGER.debug("GraphQL - test query start with: `%s`", GRAPHQL_STR_START) if not str(self.sot_agg_query).startswith(GRAPHQL_STR_START): From 206f2e3b675a08e5eb232d199a685bddf984a106 Mon Sep 17 00:00:00 2001 From: itdependsnetworks Date: Thu, 25 Nov 2021 11:49:05 -0500 Subject: [PATCH 04/36] Update policy and start following policy --- README.md | 32 ++++++++++++++++++++++++------ nautobot_golden_config/__init__.py | 2 +- pyproject.toml | 2 +- 3 files changed, 28 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md 
index 7a8e78fe..a24dc41f 100644
--- a/README.md
+++ b/README.md
@@ -68,10 +68,12 @@ The project is following Network to Code software development guidelines and are

 The branching policy includes the following tenets:

-- The develop branch is the branch of the next major or minor version planned.
-- The `stable-<major>.<minor>` branch is the branch of the latest version within that major/minor version
-- PRs intended to add new features should be sourced from the develop branch
-- PRs intended to address bug fixes and security patches should be sourced from `stable-<major>.<minor>`
+- The develop branch is the branch of the next major and minor paired version planned.
+- The `stable-<major>.<minor>` branch is the branch of the latest version within that major/minor version.
+- The `stable-<major>.<minor>` branch will have all of the latest bug fixes and security patches, and may or may not represent the released version.
+- PRs intended to add new features should be sourced from the develop branch.
+- PRs intended to add new features that break backward compatibility should be discussed before a PR is created.
+- PRs intended to address bug fixes and security patches should be sourced from `stable-<major>.<minor>`.

 Nautobot Golden Config will observe semantic versioning, as of 1.0. This may result in an quick turn around in minor versions to keep
 pace with an ever growing feature set.

@@ -80,11 +82,29 @@ pace with an ever growing feature set.

 Nautobot Golden Config has currently no intended scheduled release schedule, and will release new feature in minor versions.

+When a new release of any kind (e.g. from develop to main, or a release of a `stable-<major>.<minor>`) is created, the following should happen:
+- A release PR is created with:
+  - Update to the CHANGELOG.md file to reflect the changes.
+  - Change the version from `<major>.<minor>.<patch>-beta` to `<major>.<minor>.<patch>` in both pyproject.toml and `nautobot.__init__.__version__`.
+  - Set the PR to the proper branch, e.g. either `main` or `stable-<major>.<minor>`.
+- Ensure the tests for the PR pass.
+- Merge the PR.
+- Create a new tag:
+  - The tag should be in the form of `v<major>.<minor>.<patch>`.
+  - The title should be in the form of `v<major>.<minor>.<patch>`.
+  - The description should be the changes that were added to the CHANGELOG.md document.
+- If merged into main, then push from main to develop, in order to retain the merge commit created when the PR was merged.
+- If there is a new `<major>.<minor>`, create a `stable-<major>.<minor>` branch and push it to the repo.
+- A post-release PR is created with:
+  - Change the version from `<major>.<minor>.<patch>` to `<major>.<minor>.<patch>-beta` in both pyproject.toml and `nautobot.__init__.__version__`.
+  - Set the PR to the proper branch, e.g. either `develop` or `stable-<major>.<minor>`.
+  - Once tests pass, merge.
+
 ## Deprecation Policy

 Support of upstream Nautobot will be announced 1 minor or major version ahead. Deprecation policy will be announced within the
-CHANGELOG.md file, and updated in the table below. There will be a `stable-<major>.<minor>` branch that will be minimally maintained.
-Any security enhancements or major bugs will be supported for a limited time.
+CHANGELOG.md file, and updated in the table below. There will be a `stable-<major>.<minor>` branch that will be minimally maintained,
+where any security enhancements or major bug fixes will be supported for a limited time.
| Golden Config Version | Nautobot First Support Version | Nautobot Last Support Version | | --------------------- | ------------------------------ | ----------------------------- | diff --git a/nautobot_golden_config/__init__.py b/nautobot_golden_config/__init__.py index 4d210ce5..c39d5b48 100644 --- a/nautobot_golden_config/__init__.py +++ b/nautobot_golden_config/__init__.py @@ -1,6 +1,6 @@ """Plugin declaration for nautobot_golden_config.""" -__version__ = "0.9.10" +__version__ = "1.0.0-beta" from nautobot.extras.plugins import PluginConfig diff --git a/pyproject.toml b/pyproject.toml index ab1f4e1f..09be4e0e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nautobot-golden-config" -version = "0.9.10" +version = "1.0.0-beta" description = "A plugin for configuration on nautobot" authors = ["Network to Code, LLC", ""] From 102bafa1cbdb4442850c844d406055656a0e3813 Mon Sep 17 00:00:00 2001 From: Jacob McGill <9847006+jmcgill298@users.noreply.github.com> Date: Tue, 30 Nov 2021 14:21:49 -0500 Subject: [PATCH 05/36] Allow for Jinja2 Filters to be used by GoldenConfig templates (#158) * Allow for Jinja2 Filters to be used by GoldenConfig templates --- .pydocstyle.ini | 2 +- development/Dockerfile | 2 +- development/nautobot_config.py | 17 +++++ docs/navigating-intended.md | 48 ++++++++++++++ .../nornir_plays/config_backup.py | 4 +- .../nornir_plays/config_compliance.py | 6 +- .../nornir_plays/config_intended.py | 45 +++++++++---- nautobot_golden_config/tests/jinja_filters.py | 6 ++ .../tests/test_utilities/test_helpers.py | 64 +++++++++++++------ nautobot_golden_config/utilities/helper.py | 55 +++++++++++----- pyproject.toml | 7 +- tasks.py | 2 +- 12 files changed, 202 insertions(+), 56 deletions(-) create mode 100644 nautobot_golden_config/tests/jinja_filters.py diff --git a/.pydocstyle.ini b/.pydocstyle.ini index 71bf7596..951011dd 100644 --- a/.pydocstyle.ini +++ b/.pydocstyle.ini @@ -2,7 +2,7 @@ convention = google inherit = false match = (?!__init__).*\.py -match-dir = (?!tests)[^\.].* +match-dir = (?!tests|migrations)[^\.].* # D212 is enabled by default in google convention, and complains if we have a docstring like: # """ # My docstring is on the line after the opening quotes instead of on the same line as them. 
diff --git a/development/Dockerfile b/development/Dockerfile index 4cb91cc3..c15dc4e1 100644 --- a/development/Dockerfile +++ b/development/Dockerfile @@ -18,4 +18,4 @@ RUN apt update RUN apt install -y libmariadb-dev-compat gcc RUN pip install mysqlclient -COPY development/nautobot_config.py /opt/nautobot/nautobot_config.py \ No newline at end of file +COPY development/nautobot_config.py /opt/nautobot/nautobot_config.py diff --git a/development/nautobot_config.py b/development/nautobot_config.py index d3336593..ebb241b6 100644 --- a/development/nautobot_config.py +++ b/development/nautobot_config.py @@ -154,3 +154,20 @@ # "get_custom_compliance": "my.custom_compliance.func", }, } + +# Modify django_jinja Environment for test cases +django_jinja_config = None +for template in TEMPLATES: + if template["BACKEND"].startswith("django_jinja"): + django_jinja_config = template + +if django_jinja_config is not None: + jinja_options = django_jinja_config.get("OPTIONS") + if not jinja_options: + jinja_options = {} + django_jinja_config["OPTIONS"] = jinja_options + # Default behavior ignores UndefinedErrors + jinja_options["undefined"] = "jinja2.StrictUndefined" + +# Import filter function to have it register filter with django_jinja +from nautobot_golden_config.tests import jinja_filters # noqa: E402 diff --git a/docs/navigating-intended.md b/docs/navigating-intended.md index 8d9fcbf1..4bb7d412 100644 --- a/docs/navigating-intended.md +++ b/docs/navigating-intended.md @@ -29,6 +29,54 @@ or {% endfor %} ``` +## Adding Jinja2 Filters to the Environment. + +This plugin follows [Nautobot](https://nautobot.readthedocs.io/en/stable/plugins/development/#including-jinja2-filters) +in relying on [django_jinja](https://niwinz.github.io/django-jinja/latest/) for customizing the Jinja2 Environment. +Currently, only filters in the django_jinja Environment are passed along to +the Jinja2 Template Environment used by Nornir to render the config template. + +### Adding Filters In Nautobot Config + +Nautobot documents using the `@django_jinja.library.filter` decorator to register functions as filters with django_jinja. +However, users of plugins are not able to define plugins in the specified jinja2 filter file that is loaded into the Jinja2 Environment. +There are several alternative ways to have functions registered as filters in the django_jinja environment; +below demonstrates defining decorated functions in a separate file, and then importing them in the `nautobot_config.py` file. +This method requires that the file is in a path that is available to Nautobot's python environment. + +> django_jinja documents adding filters in the `TEMPLATES` config section; +> since Nautobot sets the `TEMPLATES` config section and does not document this in optional settings, +> it is recommended to only use the `@django_jinja.library.filter` decorator. + +#### custom_jinja_filters/config_templates.py + +```python +import ipaddress + +from django_jinja import library + + +@library.filter +def get_hostmask(address): + ip_address = ipaddress.ip_network(address) + return str(ip_address.hostmask) + + +@library.filter +def get_netmask(address): + ip_address = ipaddress.ip_network(address) + return str(ip_address.netmask) +``` + +#### nautobot_config.py + +```python +... +# custom_jinja_filters must be in nautobot's python path +from custom_jinja_filters import config_templates +... 
+``` + ## Starting a Intended Configuration Job To start a intended configuration job manually: diff --git a/nautobot_golden_config/nornir_plays/config_backup.py b/nautobot_golden_config/nornir_plays/config_backup.py index d051d64f..4d8bdc0e 100644 --- a/nautobot_golden_config/nornir_plays/config_backup.py +++ b/nautobot_golden_config/nornir_plays/config_backup.py @@ -18,7 +18,7 @@ from nautobot_golden_config.utilities.helper import ( get_job_filter, verify_global_settings, - check_jinja_template, + render_jinja_template, ) from nautobot_golden_config.models import ( GoldenConfigSetting, @@ -54,7 +54,7 @@ def run_backup( # pylint: disable=too-many-arguments backup_obj.backup_last_attempt_date = task.host.defaults.data["now"] backup_obj.save() - backup_path_template_obj = check_jinja_template(obj, logger, global_settings.backup_path_template) + backup_path_template_obj = render_jinja_template(obj, logger, global_settings.backup_path_template) backup_file = os.path.join(backup_root_folder, backup_path_template_obj) if global_settings.backup_test_connectivity is not False: diff --git a/nautobot_golden_config/nornir_plays/config_compliance.py b/nautobot_golden_config/nornir_plays/config_compliance.py index 881be39b..1d594e3d 100644 --- a/nautobot_golden_config/nornir_plays/config_compliance.py +++ b/nautobot_golden_config/nornir_plays/config_compliance.py @@ -21,7 +21,7 @@ from nautobot_golden_config.utilities.helper import ( get_job_filter, verify_global_settings, - check_jinja_template, + render_jinja_template, ) from nautobot_golden_config.nornir_plays.processor import ProcessGoldenConfig from nautobot_golden_config.utilities.utils import get_platform @@ -76,7 +76,7 @@ def run_compliance( # pylint: disable=too-many-arguments,too-many-locals compliance_obj.compliance_last_attempt_date = task.host.defaults.data["now"] compliance_obj.save() - intended_path_template_obj = check_jinja_template(obj, logger, global_settings.intended_path_template) + intended_path_template_obj = render_jinja_template(obj, logger, global_settings.intended_path_template) intended_file = os.path.join(intended_root_folder, intended_path_template_obj) @@ -84,7 +84,7 @@ def run_compliance( # pylint: disable=too-many-arguments,too-many-locals logger.log_failure(obj, f"Unable to locate intended file for device at {intended_file}") raise NornirNautobotException() - backup_template = check_jinja_template(obj, logger, global_settings.backup_path_template) + backup_template = render_jinja_template(obj, logger, global_settings.backup_path_template) backup_file = os.path.join(backup_root_path, backup_template) if not os.path.exists(backup_file): diff --git a/nautobot_golden_config/nornir_plays/config_intended.py b/nautobot_golden_config/nornir_plays/config_intended.py index fe515cd3..cc7a433f 100644 --- a/nautobot_golden_config/nornir_plays/config_intended.py +++ b/nautobot_golden_config/nornir_plays/config_intended.py @@ -8,6 +8,7 @@ from nornir.core.plugins.inventory import InventoryPluginRegister from nornir.core.task import Result, Task +from django_jinja.backend import Jinja2 from nornir_nautobot.exceptions import NornirNautobotException from nornir_nautobot.plugins.tasks.dispatcher import dispatcher @@ -21,7 +22,7 @@ from nautobot_golden_config.utilities.helper import ( get_job_filter, verify_global_settings, - check_jinja_template, + render_jinja_template, ) from nautobot_golden_config.utilities.graphql import graph_ql_query from nautobot_golden_config.nornir_plays.processor import ProcessGoldenConfig @@ -31,7 +32,7 @@ 
def run_template( # pylint: disable=too-many-arguments - task: Task, logger, global_settings, job_result, jinja_root_path, intended_root_folder + task: Task, logger, global_settings, nautobot_job, jinja_root_path, intended_root_folder ) -> Result: """Render Jinja Template. @@ -39,6 +40,11 @@ def run_template( # pylint: disable=too-many-arguments Args: task (Task): Nornir task individual object + logger (NornirLogger): Logger to log messages to. + global_settings (GoldenConfigSetting): The settings for GoldenConfigPlugin. + nautobot_job (Result): The Nautobot Job instance being ran. + jinja_root_path (str): The root path to the Jinja2 intended config file. + intended_root_folder (str): The root folder for rendered intended output configs. Returns: result (Result): Result from Nornir task @@ -51,27 +57,31 @@ def run_template( # pylint: disable=too-many-arguments intended_obj.intended_last_attempt_date = task.host.defaults.data["now"] intended_obj.save() - intended_path_template_obj = check_jinja_template(obj, logger, global_settings.intended_path_template) - output_file_location = os.path.join(intended_root_folder, intended_path_template_obj) + # Render output relative filepath and jinja template filenames + intended_output_filepath = render_jinja_template(obj, logger, global_settings.intended_path_template) + jinja_intended_template_filename = render_jinja_template(obj, logger, global_settings.jinja_path_template) - jinja_template = check_jinja_template(obj, logger, global_settings.jinja_path_template) - - status, device_data = graph_ql_query(job_result.request, obj, global_settings.sot_agg_query) + output_file_location = os.path.join(intended_root_folder, intended_output_filepath) + status, device_data = graph_ql_query(nautobot_job.request, obj, global_settings.sot_agg_query) if status != 200: logger.log_failure(obj, f"The GraphQL query return a status of {str(status)} with error of {str(device_data)}") raise NornirNautobotException() task.host.data.update(device_data) + jinja_settings = Jinja2.get_default() + jinja_env = jinja_settings.env + generated_config = task.run( task=dispatcher, name="GENERATE CONFIG", method="generate_config", obj=obj, logger=logger, - jinja_template=jinja_template, + jinja_template=jinja_intended_template_filename, jinja_root_path=jinja_root_path, output_file_location=output_file_location, default_drivers_mapping=get_dispatcher(), + jinja_filters=jinja_env.filters, )[1].result["config"] intended_obj.intended_last_success_date = task.host.defaults.data["now"] intended_obj.intended_config = generated_config @@ -82,10 +92,21 @@ def run_template( # pylint: disable=too-many-arguments return Result(host=task.host, result=generated_config) -def config_intended(job_result, data, jinja_root_path, intended_root_folder): - """Nornir play to generate configurations.""" +def config_intended(nautobot_job, data, jinja_root_path, intended_root_folder): + """ + Nornir play to generate configurations. + + Args: + nautobot_job (Result): The Nautobot Job instance being ran. + data (dict): Form data from Nautobot Job. + jinja_root_path (str): The root path to the Jinja2 intended config file. + intended_root_folder (str): The root folder for rendered intended output configs. + + Returns: + None: Intended configuration files are written to filesystem. 
+ """ now = datetime.now() - logger = NornirLogger(__name__, job_result, data.get("debug")) + logger = NornirLogger(__name__, nautobot_job, data.get("debug")) global_settings = GoldenConfigSetting.objects.first() verify_global_settings(logger, global_settings, ["jinja_path_template", "intended_path_template", "sot_agg_query"]) try: @@ -111,7 +132,7 @@ def config_intended(job_result, data, jinja_root_path, intended_root_folder): name="RENDER CONFIG", logger=logger, global_settings=global_settings, - job_result=job_result, + nautobot_job=nautobot_job, jinja_root_path=jinja_root_path, intended_root_folder=intended_root_folder, ) diff --git a/nautobot_golden_config/tests/jinja_filters.py b/nautobot_golden_config/tests/jinja_filters.py new file mode 100644 index 00000000..341df382 --- /dev/null +++ b/nautobot_golden_config/tests/jinja_filters.py @@ -0,0 +1,6 @@ +from django_jinja import library + + +@library.filter +def return_a(x): + return "a" diff --git a/nautobot_golden_config/tests/test_utilities/test_helpers.py b/nautobot_golden_config/tests/test_utilities/test_helpers.py index 9304bf3f..1768b642 100644 --- a/nautobot_golden_config/tests/test_utilities/test_helpers.py +++ b/nautobot_golden_config/tests/test_utilities/test_helpers.py @@ -1,14 +1,18 @@ """Unit tests for nautobot_golden_config utilities helpers.""" import unittest -from unittest.mock import patch, Mock +from unittest.mock import patch + +from nautobot.dcim.models import Device + from nornir_nautobot.exceptions import NornirNautobotException -from jinja2.exceptions import TemplateError +from jinja2 import exceptions as jinja_errors from nautobot_golden_config.utilities.helper import ( null_to_empty, - check_jinja_template, + render_jinja_template, ) + # pylint: disable=no-self-use @@ -25,29 +29,51 @@ def test_null_to_empty_val(self): result = null_to_empty("test") self.assertEqual(result, "test") - def test_check_jinja_template_success(self): + @patch("nautobot.dcim.models.Device") + def test_render_jinja_template_success(self, mock_device): """Simple success test to return template.""" - worker = check_jinja_template("obj", "logger", "fake-template-name") - self.assertEqual(worker, "fake-template-name") + worker = render_jinja_template(mock_device, "logger", "fake-template-contents") + self.assertEqual(worker, "fake-template-contents") + + @patch("nautobot.dcim.models.Device") + def test_render_jinja_template_success_render_context(self, mock_device): + """Test that device object is passed to template context.""" + platform = "mock_platform" + mock_device.platform = platform + rendered_template = render_jinja_template(mock_device, "logger", "{{ obj.platform }}") + self.assertEqual(rendered_template, platform) + + @patch("nautobot.dcim.models.Device") + def test_render_jinja_template_success_with_filter(self, mock_device): + """Test custom template and jinja filter are accessible.""" + rendered_template = render_jinja_template(mock_device, "logger", "{{ data | return_a }}") + self.assertEqual(rendered_template, "a") - def test_check_jinja_template_exceptions_undefined(self): + @patch("nornir_nautobot.utils.logger.NornirLogger") + @patch("nautobot.dcim.models.Device", spec=Device) + def test_render_jinja_template_exceptions_undefined(self, mock_device, mock_nornir_logger): """Use fake obj key to cause UndefinedError from Jinja2 Template.""" - log_mock = Mock() with self.assertRaises(NornirNautobotException): - check_jinja_template("test-obj", log_mock, "{{ obj.fake }}") + with self.assertRaises(jinja_errors.UndefinedError): + 
render_jinja_template(mock_device, mock_nornir_logger, "{{ obj.fake }}") + mock_nornir_logger.log_failure.assert_called_once() - def test_check_jinja_template_exceptions_syntaxerror(self): + @patch("nornir_nautobot.utils.logger.NornirLogger") + @patch("nautobot.dcim.models.Device") + def test_render_jinja_template_exceptions_syntaxerror(self, mock_device, mock_nornir_logger): """Use invalid templating to cause TemplateSyntaxError from Jinja2 Template.""" - log_mock = Mock() with self.assertRaises(NornirNautobotException): - check_jinja_template("test-obj", log_mock, "{{ obj.fake }") + with self.assertRaises(jinja_errors.TemplateSyntaxError): + render_jinja_template(mock_device, mock_nornir_logger, "{{ obj.fake }") + mock_nornir_logger.log_failure.assert_called_once() - @patch("nautobot_golden_config.utilities.helper.Template") - def test_check_jinja_template_exceptions_templateerror(self, template_mock): + @patch("nornir_nautobot.utils.logger.NornirLogger") + @patch("nautobot.dcim.models.Device") + @patch("nautobot_golden_config.utilities.helper.render_jinja2") + def test_render_jinja_template_exceptions_templateerror(self, template_mock, mock_device, mock_nornir_logger): """Cause issue to cause TemplateError form Jinja2 Template.""" - log_mock = Mock() with self.assertRaises(NornirNautobotException): - template_mock.side_effect = TemplateError - template_render = check_jinja_template("test-obj", log_mock, "template") - self.assertEqual(template_render, TemplateError) - template_mock.assert_called_once() + with self.assertRaises(jinja_errors.TemplateError): + template_mock.side_effect = jinja_errors.TemplateRuntimeError + render_jinja_template(mock_device, mock_nornir_logger, "template") + mock_nornir_logger.log_failure.assert_called_once() diff --git a/nautobot_golden_config/utilities/helper.py b/nautobot_golden_config/utilities/helper.py index 7a660488..f849d54c 100644 --- a/nautobot_golden_config/utilities/helper.py +++ b/nautobot_golden_config/utilities/helper.py @@ -1,12 +1,12 @@ """Helper functions.""" # pylint: disable=raise-missing-from -from jinja2 import Template, StrictUndefined, UndefinedError -from jinja2.exceptions import TemplateError, TemplateSyntaxError +from jinja2 import exceptions as jinja_errors from nornir_nautobot.exceptions import NornirNautobotException from nautobot.dcim.filters import DeviceFilterSet from nautobot.dcim.models import Device +from nautobot.utilities.utils import render_jinja2 from nautobot_golden_config import models @@ -63,17 +63,42 @@ def verify_global_settings(logger, global_settings, attrs): raise NornirNautobotException() -def check_jinja_template(obj, logger, template): - """Helper function to catch Jinja based issues and raise with proper NornirException.""" +def render_jinja_template(obj, logger, template): + """ + Helper function to render Jinja templates. + + Args: + obj (Device): The Device object from Nautobot. + logger (NornirLogger): Logger to log error messages to. + template (str): A Jinja2 template to be rendered. + + Returns: + str: The ``template`` rendered. + + Raises: + NornirNautobotException: When there is an error rendering the ``template``. 
+ """ try: - template_rendered = Template(template, undefined=StrictUndefined).render(obj=obj) - return template_rendered - except UndefinedError as error: - logger.log_failure(obj, f"Jinja `{template}` has an error of `{error}`.") - raise NornirNautobotException() - except TemplateSyntaxError as error: - logger.log_failure(obj, f"Jinja `{template}` has an error of `{error}`.") - raise NornirNautobotException() - except TemplateError as error: - logger.log_failure(obj, f"Jinja `{template}` has an error of `{error}`.") - raise NornirNautobotException() + return render_jinja2(template_code=template, context={"obj": obj}) + except jinja_errors.UndefinedError as error: + error_msg = ( + "Jinja encountered and UndefinedError`, check the template for missing variable definitions.\n" + f"Template:\n{template}" + ) + logger.log_failure(obj, error_msg) + raise NornirNautobotException from error + except jinja_errors.TemplateSyntaxError as error: # Also catches subclass of TemplateAssertionError + error_msg = ( + f"Jinja encountered a SyntaxError at line number {error.lineno}," + f"check the template for invalid Jinja syntax.\nTemplate:\n{template}" + ) + logger.log_failure(obj, error_msg) + raise NornirNautobotException from error + # Intentionally not catching TemplateNotFound errors since template is passes as a string and not a filename + except jinja_errors.TemplateError as error: # Catches all remaining Jinja errors + error_msg = ( + "Jinja encountered an unexpected TemplateError; check the template for correctness\n" + f"Template:\n{template}" + ) + logger.log_failure(error_msg) + raise NornirNautobotException from error diff --git a/pyproject.toml b/pyproject.toml index 09be4e0e..f93585f4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -72,8 +72,8 @@ exclude = ''' ) ''' [build-system] -requires = ["poetry>=0.12"] -build-backend = "poetry.masonry.api" +requires = ["poetry_core>=1.0.0"] +build-backend = "poetry.core.masonry.api" [tool.pylint.master] # Include the pylint_django plugin to avoid spurious warnings about Django patterns @@ -85,6 +85,9 @@ persistent="no" # Don't raise alarms if args/kwargs has an issue, as may be required, just as a decorator ignored-argument-names="args|kwargs" +# ignore files that should not be linted +ignore=["jinja_filters.py"] + [tool.pylint.basic] # No docstrings required for private methods (Pylint default), or for test_ functions, or for inner Meta classes. 
no-docstring-rgx="^(_|test_|Meta$)" diff --git a/tasks.py b/tasks.py index ec788c51..75f31069 100644 --- a/tasks.py +++ b/tasks.py @@ -274,7 +274,7 @@ def pydocstyle(context): Args: context (obj): Used to run specific commands """ - command = 'pydocstyle --config=.pydocstyle.ini --match-dir="^(?!migrations).*"' + command = "pydocstyle --config=.pydocstyle.ini" run_command(context, command) From c7cb014132a560bd7a776b5ce889bee29cf842d9 Mon Sep 17 00:00:00 2001 From: Renovate Bot Date: Tue, 7 Dec 2021 20:01:03 +0000 Subject: [PATCH 06/36] Add renovate.json --- renovate.json | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 renovate.json diff --git a/renovate.json b/renovate.json new file mode 100644 index 00000000..7190a60b --- /dev/null +++ b/renovate.json @@ -0,0 +1,3 @@ +{ + "$schema": "https://docs.renovatebot.com/renovate-schema.json" +} From c925d71ac9c7e8fd7c0992a05268cf86aafe9983 Mon Sep 17 00:00:00 2001 From: Christian Adell Date: Wed, 15 Dec 2021 08:25:50 +0100 Subject: [PATCH 07/36] Add reference to Nornir plugin for installation Add reference to Nautobot Plugin Nornir in the installation procedure --- docs/installation.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/installation.md b/docs/installation.md index a8de7b00..9da5f02b 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -8,7 +8,7 @@ Plugins can be installed manually or use Python's `pip`. See the [nautobot docum **Required:** The following block of code below shows the additional configuration required to be added to your `nautobot_config.py` file: - append `"nautobot_golden_config"` to the `PLUGINS` list -- append the `"nautobot_golden_config"` dictionary to the `PLUGINS_CONFIG` dictionary +- append the `"nautobot_golden_config"` dictionary to the `PLUGINS_CONFIG` dictionary, and `"nautobot_plugin_nornir"` if it was not already there (More info [here](https://github.com/nautobot/nautobot-plugin-nornir)) ```python PLUGINS = ["nautobot_plugin_nornir", "nautobot_golden_config"] From d805a07ecc0e4831ae973444e663d1334b150de8 Mon Sep 17 00:00:00 2001 From: Christian Adell Date: Wed, 15 Dec 2021 08:30:14 +0100 Subject: [PATCH 08/36] Reference value for sot_agg_transposer Use proper reference values for sot_agg_transposer --- docs/installation.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/installation.md b/docs/installation.md index a8de7b00..0e7b3ccb 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -55,7 +55,7 @@ The plugin behavior can be controlled with the following list of settings. | enable_intended | True | True | A boolean to represent whether or not to generate intended configurations within the plugin. | | enable_sotagg | True | True | A boolean to represent whether or not to provide a GraphQL query per device to allow the intended configuration to provide data variables to the plugin. | | platform_slug_map | {"cisco_wlc": "cisco_aireos"} | None | A dictionary in which the key is the platform slug and the value is what netutils uses in any "network_os" parameter. | -| sot_agg_transposer | mypkg.transposer | - | A string representation of a function that can post-process the graphQL data. | +| sot_agg_transposer | "mypkg.transposer" | None | A string representation of a function that can post-process the graphQL data. | | per_feature_bar_width | 0.15 | 0.15 | The width of the table bar within the overview report | | per_feature_width | 13 | 13 | The width in inches that the overview table can be. 
| | per_feature_height | 4 | 4 | The height in inches that the overview table can be. | From e30ebc1ec13f2174117a139dea0e883de62e46d8 Mon Sep 17 00:00:00 2001 From: Christian Adell Date: Wed, 15 Dec 2021 14:21:18 +0100 Subject: [PATCH 09/36] Update installation.md --- docs/installation.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/installation.md b/docs/installation.md index 9da5f02b..23a94534 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -7,8 +7,8 @@ Plugins can be installed manually or use Python's `pip`. See the [nautobot docum **Prerequisite:** The plugin relies on [`nautobot_plugin_nornir`](https://pypi.org/project/nautobot-plugin-nornir/) to be installed and both plugins to be enabled in your configuration settings. **Required:** The following block of code below shows the additional configuration required to be added to your `nautobot_config.py` file: -- append `"nautobot_golden_config"` to the `PLUGINS` list -- append the `"nautobot_golden_config"` dictionary to the `PLUGINS_CONFIG` dictionary, and `"nautobot_plugin_nornir"` if it was not already there (More info [here](https://github.com/nautobot/nautobot-plugin-nornir)) +- append `"nautobot_golden_config"` to the `PLUGINS` list, and `"nautobot_plugin_nornir"` if it was not already there (More info [here](https://github.com/nautobot/nautobot-plugin-nornir)) +- append the `"nautobot_golden_config"` dictionary to the `PLUGINS_CONFIG` dictionary, and `"nautobot_plugin_nornir"` if it was not already there. ```python PLUGINS = ["nautobot_plugin_nornir", "nautobot_golden_config"] From 49ed1b0ce961dec6316619b42dd1f90065f6ee20 Mon Sep 17 00:00:00 2001 From: Christian Adell Date: Wed, 15 Dec 2021 14:24:36 +0100 Subject: [PATCH 10/36] Fix markdown links in quick-start --- docs/quick-start.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/quick-start.md b/docs/quick-start.md index 5dd995ec..aa58e892 100644 --- a/docs/quick-start.md +++ b/docs/quick-start.md @@ -27,8 +27,8 @@ Follow the steps below to get up and running for the configuration backup elemen 4. Create Configuration Removals and Replacements. - 1. [Config Removals](./navigating-backup#config-removals) - 2. [Config Replacements](./navigating-backup#config-replacements) + 1. [Config Removals](./navigating-backup.md#config-removals) + 2. [Config Replacements](./navigating-backup.md#config-replacements) 5. Execute the Backup. From 0d7e3c2a1966f5549ed4ff9c8dd097d994e1c982 Mon Sep 17 00:00:00 2001 From: Christian Adell Date: Wed, 15 Dec 2021 14:27:27 +0100 Subject: [PATCH 11/36] More specific run backup instructions --- docs/quick-start.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/quick-start.md b/docs/quick-start.md index aa58e892..a94bf6bb 100644 --- a/docs/quick-start.md +++ b/docs/quick-start.md @@ -32,10 +32,10 @@ Follow the steps below to get up and running for the configuration backup elemen 5. Execute the Backup. - 1. Navigate to `Plugins -> Home`. + 1. Navigate to `Plugins -> Home` under the Golden Configuration Section. 2. Click on the `Execute` button and select `Backup`. 3. Select what to run the backup on. - 4. Run the Job. + 4. Run the Job by clicking "Run Job" button. 
> For in-depth details see [Navigating Backup](./navigating-backup.md) From 8fb14166345ded59675a7047d25b0a30973739cf Mon Sep 17 00:00:00 2001 From: Renovate Bot Date: Wed, 15 Dec 2021 23:02:59 +0000 Subject: [PATCH 12/36] Update mariadb Docker tag to v10.7 --- development/docker-compose.mysql.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/development/docker-compose.mysql.yml b/development/docker-compose.mysql.yml index 02966e34..bde8b14e 100644 --- a/development/docker-compose.mysql.yml +++ b/development/docker-compose.mysql.yml @@ -1,7 +1,7 @@ --- services: db: - image: "mariadb:10.6" + image: "mariadb:10.7" env_file: - "dev.env" volumes: From 35a5edcc332972bcac1543d22a1577dee3954d0c Mon Sep 17 00:00:00 2001 From: Renovate Bot Date: Wed, 15 Dec 2021 23:03:02 +0000 Subject: [PATCH 13/36] Update postgres Docker tag to v14 --- development/docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/development/docker-compose.yml b/development/docker-compose.yml index 905bdcd5..d18215b6 100644 --- a/development/docker-compose.yml +++ b/development/docker-compose.yml @@ -37,7 +37,7 @@ services: entrypoint: "nautobot-server celery worker -B -l INFO" <<: *nautobot-base db: - image: "postgres:13" + image: "postgres:14" env_file: - "dev.env" volumes: From 1558e8fd308a567d0356223ca9a70c07533da7e9 Mon Sep 17 00:00:00 2001 From: itdependsnetworks Date: Thu, 16 Dec 2021 09:27:53 -0500 Subject: [PATCH 14/36] Update Markdown dependency --- poetry.lock | 626 ++++++++++++++++++++++++++----------------------- pyproject.toml | 2 +- 2 files changed, 335 insertions(+), 293 deletions(-) diff --git a/poetry.lock b/poetry.lock index 54e5e7de..952aa9f4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -11,7 +11,7 @@ contextvars = {version = "2.4", markers = "python_version < \"3.7\""} [[package]] name = "amqp" -version = "5.0.6" +version = "5.0.7" description = "Low-level AMQP client for Python (fork of amqplib)." category = "main" optional = false @@ -110,7 +110,7 @@ python-versions = "*" [[package]] name = "black" -version = "21.11b1" +version = "21.12b0" description = "The uncompromising code formatter." category = "dev" optional = false @@ -122,7 +122,6 @@ dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} mypy-extensions = ">=0.4.3" pathspec = ">=0.9.0,<1" platformdirs = ">=2" -regex = ">=2021.4.4" tomli = ">=0.2.6,<2.0.0" typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} typing-extensions = [ @@ -217,7 +216,7 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "2.0.8" +version = "2.0.9" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false @@ -228,7 +227,7 @@ unicode_backport = ["unicodedata2"] [[package]] name = "ciscoconfparse" -version = "1.6.7" +version = "1.6.10" description = "Parse, Audit, Query, Build, and Modify Cisco IOS-style and JunOS-style configurations" category = "main" optional = false @@ -236,7 +235,7 @@ python-versions = ">=3.6" [package.dependencies] dnspython = ">=2.1.0,<3.0.0" -ipaddr = "*" +ipaddr = ">=2.1.11" loguru = "0.5.3" passlib = ">=1.7.4,<2.0.0" toml = "0.10.2" @@ -333,7 +332,7 @@ jinja2 = "*" [[package]] name = "cryptography" -version = "36.0.0" +version = "36.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
category = "main" optional = false @@ -404,7 +403,7 @@ dev = ["tox", "bump2version (<1)", "sphinx (<2)", "importlib-metadata (<3)", "im [[package]] name = "django" -version = "3.1.13" +version = "3.1.14" description = "A high-level Python Web framework that encourages rapid development and clean, pragmatic design." category = "main" optional = false @@ -444,6 +443,35 @@ funcy = ">=1.8,<2.0" redis = ">=3.0.0" six = ">=1.4.0" +[[package]] +name = "django-celery-beat" +version = "2.2.1" +description = "Database-backed Periodic Tasks." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +celery = ">=5.0,<6.0" +Django = ">=2.2,<4.0" +django-timezone-field = ">=4.1.0,<5.0" +python-crontab = ">=2.3.4" + +[[package]] +name = "django-constance" +version = "2.8.0" +description = "Django live settings with pluggable backends, including Redis." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +django-picklefield = {version = "*", optional = true, markers = "extra == \"database\""} + +[package.extras] +database = ["django-picklefield"] +redis = ["redis"] + [[package]] name = "django-cors-headers" version = "3.7.0" @@ -480,7 +508,7 @@ Django = "*" [[package]] name = "django-debug-toolbar" -version = "3.2.2" +version = "3.2.4" description = "A configurable set of panels that display various debug information about the current request/response." category = "dev" optional = false @@ -490,6 +518,17 @@ python-versions = ">=3.6" Django = ">=2.2" sqlparse = ">=0.2.0" +[[package]] +name = "django-extensions" +version = "3.1.5" +description = "Extensions for Django" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +Django = ">=2.2" + [[package]] name = "django-filter" version = "2.4.0" @@ -503,7 +542,7 @@ Django = ">=2.2" [[package]] name = "django-health-check" -version = "3.16.4" +version = "3.16.5" description = "Run checks on services like databases, queue servers, celery processes, etc." 
category = "main" optional = false @@ -544,6 +583,20 @@ python-versions = ">=3.5" Django = ">=1.11" django-js-asset = "*" +[[package]] +name = "django-picklefield" +version = "3.0.1" +description = "Pickled object field for Django" +category = "main" +optional = false +python-versions = ">=3" + +[package.dependencies] +Django = ">=2.2" + +[package.extras] +tests = ["tox"] + [[package]] name = "django-pivot" version = "1.8.1" @@ -704,17 +757,17 @@ validation = ["swagger-spec-validator (>=2.1.0)"] [[package]] name = "flake8" -version = "4.0.1" +version = "3.9.2" description = "the modular source code checker: pep8 pyflakes and co" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [package.dependencies] -importlib-metadata = {version = "<4.3", markers = "python_version < \"3.8\""} +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.8.0,<2.9.0" -pyflakes = ">=2.4.0,<2.5.0" +pycodestyle = ">=2.7.0,<2.8.0" +pyflakes = ">=2.3.0,<2.4.0" [[package]] name = "funcy" @@ -745,7 +798,7 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.20" +version = "3.1.18" description = "Python Git Library" category = "main" optional = false @@ -753,7 +806,7 @@ python-versions = ">=3.6" [package.dependencies] gitdb = ">=4.0.1,<5" -typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.10\""} +typing-extensions = {version = ">=3.7.4.0", markers = "python_version < \"3.8\""} [[package]] name = "graphene" @@ -796,6 +849,14 @@ dev = ["black (==19.10b0)", "flake8 (==3.7.9)", "flake8-black (==0.1.1)", "flake rest_framework = ["djangorestframework (>=3.6.3)"] test = ["pytest (>=3.6.3)", "pytest-cov", "coveralls", "mock", "pytz", "pytest-django (>=3.3.2)", "djangorestframework (>=3.6.3)", "django-filter (<2)", "django-filter (>=2)"] +[[package]] +name = "graphene-django-optimizer" +version = "0.8.0" +description = "Optimize database access inside graphene queries." +category = "main" +optional = false +python-versions = "*" + [[package]] name = "graphql-core" version = "2.3.2" @@ -850,7 +911,7 @@ test = ["flake8 (>=3.8.4,<3.9.0)", "pycodestyle (>=2.6.0,<2.7.0)", "mypy (>=0.91 [[package]] name = "importlib-metadata" -version = "3.4.0" +version = "4.4.0" description = "Read metadata from Python packages" category = "main" optional = false @@ -862,7 +923,7 @@ zipp = ">=0.5" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] [[package]] name = "importlib-resources" @@ -1018,11 +1079,11 @@ zookeeper = ["kazoo (>=1.3.1)"] [[package]] name = "lazy-object-proxy" -version = "1.6.0" +version = "1.7.1" description = "A fast and thorough lazy object proxy." 
category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.6" [[package]] name = "loguru" @@ -1042,7 +1103,7 @@ dev = ["codecov (>=2.0.15)", "colorama (>=0.3.4)", "flake8 (>=3.7.7)", "tox (>=3 [[package]] name = "lxml" -version = "4.6.4" +version = "4.7.1" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." category = "main" optional = false @@ -1056,14 +1117,14 @@ source = ["Cython (>=0.29.7)"] [[package]] name = "markdown" -version = "3.3.4" +version = "3.3.6" description = "Python implementation of Markdown." category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} [package.extras] testing = ["coverage", "pyyaml"] @@ -1135,7 +1196,7 @@ textfsm = "*" [[package]] name = "nautobot" -version = "1.1.4" +version = "1.2.0" description = "Source of truth and network automation platform." category = "main" optional = false @@ -1145,9 +1206,12 @@ python-versions = ">=3.6,<4.0" celery = ">=5.1.0,<5.2.0" Django = ">=3.1.12,<3.2.0" django-cacheops = ">=5.1,<5.2" +django-celery-beat = ">=2.2.0,<3.0.0" +django-constance = {version = ">=2.8.0,<2.9.0", extras = ["database"]} django-cors-headers = ">=3.7.0,<3.8.0" django-cryptography = ">=1.0,<1.1" django-db-file-storage = ">=0.5.5,<0.6.0" +django-extensions = ">=3.1.5,<3.2.0" django-filter = ">=2.4.0,<2.5.0" django-health-check = ">=3.16.4,<4.0.0" django-jinja = "<2.8.0" @@ -1161,13 +1225,15 @@ django-timezone-field = ">=4.1.2,<4.2.0" django-webserver = ">=1.2.0,<1.3.0" djangorestframework = ">=3.12.4,<3.13.0" drf-yasg = {version = ">=1.20.0,<1.21.0", extras = ["validation"]} -GitPython = ">=3.1.15,<3.2.0" +GitPython = "3.1.18" graphene-django = ">=2.15.0,<2.16.0" -importlib-metadata = {version = ">=3.4.0,<3.5.0", markers = "python_version < \"3.8\""} +graphene-django-optimizer = ">=0.8.0,<0.9.0" +importlib-metadata = {version = ">=4.4,<4.5", markers = "python_version < \"3.10\""} Jinja2 = ">=2.11.3,<2.12.0" jsonschema = ">=3.2.0,<3.3.0" -Markdown = ">=3.3.4,<3.4.0" +Markdown = ">=3.3.6,<3.4.0" netaddr = ">=0.8.0,<0.9.0" +netutils = ">=1.0.0,<1.1.0" Pillow = ">=8.3.2,<8.4.0" psycopg2-binary = ">=2.8.6,<2.9.0" pycryptodome = ">=3.10.1,<3.11.0" @@ -1177,7 +1243,12 @@ social-auth-app-django = ">=4.0.0,<5.0.0" svgwrite = ">=1.4.1,<1.5.0" [package.extras] +all = ["django-auth-ldap (>=3.0.0,<3.1.0)", "django-storages (>=1.12.3,<1.13.0)", "mysqlclient (>=2.0.3,<2.1.0)", "napalm (>=3.3.1,<4.0.0)", "social-auth-core[saml,openidconnect] (>=4.1.0,<4.2.0)"] +ldap = ["django-auth-ldap (>=3.0.0,<3.1.0)"] +remote_storage = ["django-storages (>=1.12.3,<1.13.0)"] mysql = ["mysqlclient (>=2.0.3,<2.1.0)"] +napalm = ["napalm (>=3.3.1,<4.0.0)"] +sso = ["social-auth-core[saml,openidconnect] (>=4.1.0,<4.2.0)"] [[package]] name = "nautobot-plugin-nornir" @@ -1237,7 +1308,7 @@ test = ["pyyaml (>=5.1.2)", "pytest (>=5.1.2)"] [[package]] name = "netutils" -version = "0.2.5" +version = "1.0.0" description = "Common helper functions useful in network automation." 
category = "main" optional = false @@ -1286,14 +1357,14 @@ nornir = ">=3,<4" [[package]] name = "nornir-nautobot" -version = "2.2.0" +version = "2.2.1" description = "Nornir Nautobot" category = "main" optional = false python-versions = ">=3.6,<4.0" [package.dependencies] -netutils = ">=0,<1" +netutils = ">=1,<2" nornir = ">=3.0.0,<4.0.0" nornir-jinja2 = ">=0,<1" nornir-napalm = ">=0,<1" @@ -1378,7 +1449,7 @@ pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] name = "paramiko" -version = "2.8.0" +version = "2.8.1" description = "SSH2 protocol library" category = "main" optional = false @@ -1491,11 +1562,11 @@ python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" [[package]] name = "pycodestyle" -version = "2.8.0" +version = "2.7.0" description = "Python style guide checker" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pycparser" @@ -1544,7 +1615,7 @@ test = ["coverage", "mock"] [[package]] name = "pyflakes" -version = "2.4.0" +version = "2.3.1" description = "passive checker of Python programs" category = "dev" optional = false @@ -1626,7 +1697,7 @@ tests = ["pytest (>=3.2.1,!=3.3.0)", "hypothesis (>=3.27.0)"] [[package]] name = "pynautobot" -version = "1.0.3" +version = "1.0.4" description = "Nautobot API client library" category = "main" optional = false @@ -1668,6 +1739,21 @@ python-versions = "*" [package.extras] cp2110 = ["hidapi"] +[[package]] +name = "python-crontab" +version = "2.6.0" +description = "Python Crontab API" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +python-dateutil = "*" + +[package.extras] +cron-description = ["cron-descriptor"] +cron-schedule = ["croniter"] + [[package]] name = "python-dateutil" version = "2.8.2" @@ -1732,14 +1818,6 @@ deprecated = "*" [package.extras] hiredis = ["hiredis (>=1.0.0)"] -[[package]] -name = "regex" -version = "2021.11.10" -description = "Alternative regular expression module, to replace re." -category = "dev" -optional = false -python-versions = "*" - [[package]] name = "requests" version = "2.26.0" @@ -1775,7 +1853,7 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] [[package]] name = "rq" -version = "1.10.0" +version = "1.10.1" description = "RQ is a simple, lightweight, library for creating background jobs, and processing them." 
category = "main" optional = false @@ -1818,7 +1896,7 @@ python-versions = "*" [[package]] name = "scp" -version = "0.14.1" +version = "0.14.2" description = "scp module for paramiko" category = "main" optional = false @@ -1984,7 +2062,7 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "tomli" -version = "1.2.2" +version = "1.2.3" description = "A lil' TOML parser" category = "dev" optional = false @@ -2007,7 +2085,7 @@ test = ["pytest"] [[package]] name = "typed-ast" -version = "1.5.0" +version = "1.5.1" description = "a fork of Python 2 and 3 ast modules with type comment support" category = "dev" optional = false @@ -2060,7 +2138,7 @@ python-versions = "*" [[package]] name = "win32-setctime" -version = "1.0.3" +version = "1.0.4" description = "A small Python utility to set file creation time on Windows" category = "main" optional = false @@ -2115,7 +2193,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytes [metadata] lock-version = "1.1" python-versions = "^3.6" -content-hash = "a6a5edda2ec37927958c11ef415aae09057bd447250bb5d9644d144bef54f874" +content-hash = "d4ecd6f04c9ca5f7751e58b5045a69ef4fad75e2816ea0230f6b668d667e7af9" [metadata.files] aiocontextvars = [ @@ -2123,8 +2201,8 @@ aiocontextvars = [ {file = "aiocontextvars-0.2.2.tar.gz", hash = "sha256:f027372dc48641f683c559f247bd84962becaacdc9ba711d583c3871fb5652aa"}, ] amqp = [ - {file = "amqp-5.0.6-py3-none-any.whl", hash = "sha256:493a2ac6788ce270a2f6a765b017299f60c1998f5a8617908ee9be082f7300fb"}, - {file = "amqp-5.0.6.tar.gz", hash = "sha256:03e16e94f2b34c31f8bf1206d8ddd3ccaa4c315f7f6a1879b7b1210d229568c2"}, + {file = "amqp-5.0.7-py3-none-any.whl", hash = "sha256:4d9cb6b5d69183ba279e97382ff68a071864c25b561d206dab73499d3ed26d1c"}, + {file = "amqp-5.0.7.tar.gz", hash = "sha256:d757b78fd7d3c6bb60e3ee811b68145583643747ed3ec253329f086aa3a72a5d"}, ] aniso8601 = [ {file = "aniso8601-7.0.0-py2.py3-none-any.whl", hash = "sha256:d10a4bf949f619f719b227ef5386e31f49a2b6d453004b21f02661ccc8670c7b"}, @@ -2160,8 +2238,8 @@ billiard = [ {file = "billiard-3.6.4.0.tar.gz", hash = "sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547"}, ] black = [ - {file = "black-21.11b1-py3-none-any.whl", hash = "sha256:802c6c30b637b28645b7fde282ed2569c0cd777dbe493a41b6a03c1d903f99ac"}, - {file = "black-21.11b1.tar.gz", hash = "sha256:a042adbb18b3262faad5aff4e834ff186bb893f95ba3a8013f09de1e5569def2"}, + {file = "black-21.12b0-py3-none-any.whl", hash = "sha256:a615e69ae185e08fdd73e4715e260e2479c861b5740057fde6e8b4e3b7dd589f"}, + {file = "black-21.12b0.tar.gz", hash = "sha256:77b80f693a569e2e527958459634f18df9b0ba2625ba4e0c2d5da5be42e6f2b3"}, ] cached-property = [ {file = "cached-property-1.5.2.tar.gz", hash = "sha256:9fa5755838eecbb2d234c3aa390bd80fbd3ac6b6869109bfc1b499f7bd89a130"}, @@ -2228,12 +2306,12 @@ cffi = [ {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, ] charset-normalizer = [ - {file = "charset-normalizer-2.0.8.tar.gz", hash = "sha256:735e240d9a8506778cd7a453d97e817e536bb1fc29f4f6961ce297b9c7a917b0"}, - {file = "charset_normalizer-2.0.8-py3-none-any.whl", hash = "sha256:83fcdeb225499d6344c8f7f34684c2981270beacc32ede2e669e94f7fa544405"}, + {file = "charset-normalizer-2.0.9.tar.gz", hash = "sha256:b0b883e8e874edfdece9c28f314e3dd5badf067342e42fb162203335ae61aa2c"}, + {file = "charset_normalizer-2.0.9-py3-none-any.whl", hash = "sha256:1eecaa09422db5be9e29d7fc65664e6c33bd06f9ced7838578ba40d58bdf3721"}, ] 
ciscoconfparse = [ - {file = "ciscoconfparse-1.6.7-py3-none-any.whl", hash = "sha256:8c7b23e859f694b5c9293a557e0759445e6b9668d6705d38e057d8d7e64926c5"}, - {file = "ciscoconfparse-1.6.7.tar.gz", hash = "sha256:953c83c330aac81f17a7fbe783b728f1cfc95e77f34a3382baa6e2a3d17d45c6"}, + {file = "ciscoconfparse-1.6.10-py3-none-any.whl", hash = "sha256:2e7c5a3c9b0b12df9825c850ff78a8a3b7d1adda8c62f3ac1b6aa0746f97ec23"}, + {file = "ciscoconfparse-1.6.10.tar.gz", hash = "sha256:aa0329e240013b8a72cff8f1c749e565ed00554f64dc0bf20889054aea0ba640"}, ] click = [ {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, @@ -2266,27 +2344,26 @@ coreschema = [ {file = "coreschema-0.0.4.tar.gz", hash = "sha256:9503506007d482ab0867ba14724b93c18a33b22b6d19fb419ef2d239dd4a1607"}, ] cryptography = [ - {file = "cryptography-36.0.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:9511416e85e449fe1de73f7f99b21b3aa04fba4c4d335d30c486ba3756e3a2a6"}, - {file = "cryptography-36.0.0-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:97199a13b772e74cdcdb03760c32109c808aff7cd49c29e9cf4b7754bb725d1d"}, - {file = "cryptography-36.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:494106e9cd945c2cadfce5374fa44c94cfadf01d4566a3b13bb487d2e6c7959e"}, - {file = "cryptography-36.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6fbbbb8aab4053fa018984bb0e95a16faeb051dd8cca15add2a27e267ba02b58"}, - {file = "cryptography-36.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:684993ff6f67000a56454b41bdc7e015429732d65a52d06385b6e9de6181c71e"}, - {file = "cryptography-36.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c702855cd3174666ef0d2d13dcc879090aa9c6c38f5578896407a7028f75b9f"}, - {file = "cryptography-36.0.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d91bc9f535599bed58f6d2e21a2724cb0c3895bf41c6403fe881391d29096f1d"}, - {file = "cryptography-36.0.0-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:b17d83b3d1610e571fedac21b2eb36b816654d6f7496004d6a0d32f99d1d8120"}, - {file = "cryptography-36.0.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8982c19bb90a4fa2aad3d635c6d71814e38b643649b4000a8419f8691f20ac44"}, - {file = "cryptography-36.0.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:24469d9d33217ffd0ce4582dfcf2a76671af115663a95328f63c99ec7ece61a4"}, - {file = "cryptography-36.0.0-cp36-abi3-win32.whl", hash = "sha256:f6a5a85beb33e57998dc605b9dbe7deaa806385fdf5c4810fb849fcd04640c81"}, - {file = "cryptography-36.0.0-cp36-abi3-win_amd64.whl", hash = "sha256:2deab5ec05d83ddcf9b0916319674d3dae88b0e7ee18f8962642d3cde0496568"}, - {file = "cryptography-36.0.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2049f8b87f449fc6190350de443ee0c1dd631f2ce4fa99efad2984de81031681"}, - {file = "cryptography-36.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a776bae1629c8d7198396fd93ec0265f8dd2341c553dc32b976168aaf0e6a636"}, - {file = "cryptography-36.0.0-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:aa94d617a4cd4cdf4af9b5af65100c036bce22280ebb15d8b5262e8273ebc6ba"}, - {file = "cryptography-36.0.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:5c49c9e8fb26a567a2b3fa0343c89f5d325447956cc2fc7231c943b29a973712"}, - {file = "cryptography-36.0.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:ef216d13ac8d24d9cd851776662f75f8d29c9f2d05cdcc2d34a18d32463a9b0b"}, - {file = "cryptography-36.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:231c4a69b11f6af79c1495a0e5a85909686ea8db946935224b7825cfb53827ed"}, - {file = "cryptography-36.0.0-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:f92556f94e476c1b616e6daec5f7ddded2c082efa7cee7f31c7aeda615906ed8"}, - {file = "cryptography-36.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d73e3a96c38173e0aa5646c31bf8473bc3564837977dd480f5cbeacf1d7ef3a3"}, - {file = "cryptography-36.0.0.tar.gz", hash = "sha256:52f769ecb4ef39865719aedc67b4b7eae167bafa48dbc2a26dd36fa56460507f"}, + {file = "cryptography-36.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:73bc2d3f2444bcfeac67dd130ff2ea598ea5f20b40e36d19821b4df8c9c5037b"}, + {file = "cryptography-36.0.1-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:2d87cdcb378d3cfed944dac30596da1968f88fb96d7fc34fdae30a99054b2e31"}, + {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74d6c7e80609c0f4c2434b97b80c7f8fdfaa072ca4baab7e239a15d6d70ed73a"}, + {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:6c0c021f35b421ebf5976abf2daacc47e235f8b6082d3396a2fe3ccd537ab173"}, + {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d59a9d55027a8b88fd9fd2826c4392bd487d74bf628bb9d39beecc62a644c12"}, + {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a817b961b46894c5ca8a66b599c745b9a3d9f822725221f0e0fe49dc043a3a3"}, + {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:94ae132f0e40fe48f310bba63f477f14a43116f05ddb69d6fa31e93f05848ae2"}, + {file = "cryptography-36.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7be0eec337359c155df191d6ae00a5e8bbb63933883f4f5dffc439dac5348c3f"}, + {file = "cryptography-36.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e0344c14c9cb89e76eb6a060e67980c9e35b3f36691e15e1b7a9e58a0a6c6dc3"}, + {file = "cryptography-36.0.1-cp36-abi3-win32.whl", hash = "sha256:4caa4b893d8fad33cf1964d3e51842cd78ba87401ab1d2e44556826df849a8ca"}, + {file = "cryptography-36.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:391432971a66cfaf94b21c24ab465a4cc3e8bf4a939c1ca5c3e3a6e0abebdbcf"}, + {file = "cryptography-36.0.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bb5829d027ff82aa872d76158919045a7c1e91fbf241aec32cb07956e9ebd3c9"}, + {file = "cryptography-36.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc15b1c22e55c4d5566e3ca4db8689470a0ca2babef8e3a9ee057a8b82ce4b1"}, + {file = "cryptography-36.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:596f3cd67e1b950bc372c33f1a28a0692080625592ea6392987dba7f09f17a94"}, + {file = "cryptography-36.0.1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:30ee1eb3ebe1644d1c3f183d115a8c04e4e603ed6ce8e394ed39eea4a98469ac"}, + {file = "cryptography-36.0.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec63da4e7e4a5f924b90af42eddf20b698a70e58d86a72d943857c4c6045b3ee"}, + {file = "cryptography-36.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca238ceb7ba0bdf6ce88c1b74a87bffcee5afbfa1e41e173b1ceb095b39add46"}, + {file = "cryptography-36.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", 
hash = "sha256:ca28641954f767f9822c24e927ad894d45d5a1e501767599647259cbf030b903"}, + {file = "cryptography-36.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:39bdf8e70eee6b1c7b289ec6e5d84d49a6bfa11f8b8646b5b3dfe41219153316"}, + {file = "cryptography-36.0.1.tar.gz", hash = "sha256:53e5c1dc3d7a953de055d77bef2ff607ceef7a2aac0353b5d630ab67f7423638"}, ] cycler = [ {file = "cycler-0.11.0-py3-none-any.whl", hash = "sha256:3a27e95f763a428a739d2add979fa7494c912a32c17c4c38c4d5f082cad165a3"}, @@ -2309,8 +2386,8 @@ deprecated = [ {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"}, ] django = [ - {file = "Django-3.1.13-py3-none-any.whl", hash = "sha256:a6e0d1ff11095b7394c079ade7094c73b2dc3df4a7a373c9b58ed73b77a97feb"}, - {file = "Django-3.1.13.tar.gz", hash = "sha256:9f8be75646f62204320b195062b1d696ba28aa3d45ee72fb7c888ffaebc5bdb2"}, + {file = "Django-3.1.14-py3-none-any.whl", hash = "sha256:0fabc786489af16ad87a8c170ba9d42bfd23f7b699bd5ef05675864e8d012859"}, + {file = "Django-3.1.14.tar.gz", hash = "sha256:72a4a5a136a214c39cf016ccdd6b69e2aa08c7479c66d93f3a9b5e4bb9d8a347"}, ] django-appconf = [ {file = "django-appconf-1.0.5.tar.gz", hash = "sha256:be3db0be6c81fa84742000b89a81c016d70ae66a7ccb620cdef592b1f1a6aaa4"}, @@ -2320,6 +2397,14 @@ django-cacheops = [ {file = "django-cacheops-5.1.tar.gz", hash = "sha256:d5851cd7bf3087384a1fcecfa8dddb8f55030eedfd6fdf127225b75bca0f99dd"}, {file = "django_cacheops-5.1-py2.py3-none-any.whl", hash = "sha256:02320df37754c143477c5fbd54abf292bcd8b4ca40631e19ec15efa3cc085532"}, ] +django-celery-beat = [ + {file = "django-celery-beat-2.2.1.tar.gz", hash = "sha256:97ae5eb309541551bdb07bf60cc57cadacf42a74287560ced2d2c06298620234"}, + {file = "django_celery_beat-2.2.1-py2.py3-none-any.whl", hash = "sha256:ab43049634fd18dc037927d7c2c7d5f67f95283a20ebbda55f42f8606412e66c"}, +] +django-constance = [ + {file = "django-constance-2.8.0.tar.gz", hash = "sha256:0a492454acc78799ce7b9f7a28a00c53427d513f34f8bf6fdc90a46d8864b2af"}, + {file = "django_constance-2.8.0-py3-none-any.whl", hash = "sha256:60fec73e397d5f4f7440f611b18d3e7ce5342647f316fedc47b62e1411c849e7"}, +] django-cors-headers = [ {file = "django-cors-headers-3.7.0.tar.gz", hash = "sha256:96069c4aaacace786a34ee7894ff680780ec2644e4268b31181044410fecd12e"}, {file = "django_cors_headers-3.7.0-py3-none-any.whl", hash = "sha256:1ac2b1213de75a251e2ba04448da15f99bcfcbe164288ae6b5ff929dc49b372f"}, @@ -2332,16 +2417,20 @@ django-db-file-storage = [ {file = "django-db-file-storage-0.5.5.tar.gz", hash = "sha256:5d5da694b78ab202accab4508b958e0e37b3d146310e76f6f6125e1bdeaaad14"}, ] django-debug-toolbar = [ - {file = "django-debug-toolbar-3.2.2.tar.gz", hash = "sha256:8c5b13795d4040008ee69ba82dcdd259c49db346cf7d0de6e561a49d191f0860"}, - {file = "django_debug_toolbar-3.2.2-py3-none-any.whl", hash = "sha256:d7bab7573fab35b0fd029163371b7182f5826c13da69734beb675c761d06a4d3"}, + {file = "django-debug-toolbar-3.2.4.tar.gz", hash = "sha256:644bbd5c428d3283aa9115722471769cac1bec189edf3a0c855fd8ff870375a9"}, + {file = "django_debug_toolbar-3.2.4-py3-none-any.whl", hash = "sha256:6b633b6cfee24f232d73569870f19aa86c819d750e7f3e833f2344a9eb4b4409"}, +] +django-extensions = [ + {file = "django-extensions-3.1.5.tar.gz", hash = "sha256:28e1e1bf49f0e00307ba574d645b0af3564c981a6dfc87209d48cb98f77d0b1a"}, + {file = "django_extensions-3.1.5-py3-none-any.whl", hash = "sha256:9238b9e016bb0009d621e05cf56ea8ce5cce9b32e91ad2026996a7377ca28069"}, ] django-filter = [ {file = 
"django-filter-2.4.0.tar.gz", hash = "sha256:84e9d5bb93f237e451db814ed422a3a625751cbc9968b484ecc74964a8696b06"}, {file = "django_filter-2.4.0-py3-none-any.whl", hash = "sha256:e00d32cebdb3d54273c48f4f878f898dced8d5dfaad009438fe61ebdf535ace1"}, ] django-health-check = [ - {file = "django-health-check-3.16.4.tar.gz", hash = "sha256:334bcbbb9273a6dbd9c928e78474306e623dfb38cc442281cb9fd230a20a7fdb"}, - {file = "django_health_check-3.16.4-py2.py3-none-any.whl", hash = "sha256:86a8869d67e72394a1dd73e37819a7d2cfd915588b96927fda611d7451fd4735"}, + {file = "django-health-check-3.16.5.tar.gz", hash = "sha256:1edfd49293ccebbce29f9da609c407f307aee240ab799ab4201031341ae78c0f"}, + {file = "django_health_check-3.16.5-py2.py3-none-any.whl", hash = "sha256:8d66781a0ea82b1a8b44878187b38a27370e94f18287312e39be0593e72d8983"}, ] django-jinja = [ {file = "django-jinja-2.7.1.tar.gz", hash = "sha256:0d2c90ccc4763f67b07ace2b8a2f23df16d2995b4dc841597443fb4eea746505"}, @@ -2355,6 +2444,10 @@ django-mptt = [ {file = "django-mptt-0.11.0.tar.gz", hash = "sha256:dfdb3af75ad27cdd4458b0544ec8574174f2b90f99bc2cafab6a15b4bc1895a8"}, {file = "django_mptt-0.11.0-py2.py3-none-any.whl", hash = "sha256:90eb236eb4f1a92124bd7c37852bbe09c0d21158477cc237556d59842a91c509"}, ] +django-picklefield = [ + {file = "django-picklefield-3.0.1.tar.gz", hash = "sha256:15ccba592ca953b9edf9532e64640329cd47b136b7f8f10f2939caa5f9ce4287"}, + {file = "django_picklefield-3.0.1-py3-none-any.whl", hash = "sha256:3c702a54fde2d322fe5b2f39b8f78d9f655b8f77944ab26f703be6c0ed335a35"}, +] django-pivot = [ {file = "django-pivot-1.8.1.tar.gz", hash = "sha256:7184d3e3f5e96003150428bea106a9963f49f0431fa56f93595316c9b42bcca6"}, {file = "django_pivot-1.8.1-py3-none-any.whl", hash = "sha256:9bf83b2b61d4dc95c01e5b7a595ee223c5c1f08a4590733673a306b1513174d4"}, @@ -2400,8 +2493,8 @@ drf-yasg = [ {file = "drf_yasg-1.20.0-py2.py3-none-any.whl", hash = "sha256:8b72e5b1875931a8d11af407be3a9a5ba8776541492947a0df5bafda6b7f8267"}, ] flake8 = [ - {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, - {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, + {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, + {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, ] funcy = [ {file = "funcy-1.16-py2.py3-none-any.whl", hash = "sha256:1d3fc5d42cf7564a6b2be04042d0df7a50c77903cf760a34786d0c9ebd659b25"}, @@ -2415,8 +2508,8 @@ gitdb = [ {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, ] gitpython = [ - {file = "GitPython-3.1.20-py3-none-any.whl", hash = "sha256:b1e1c269deab1b08ce65403cf14e10d2ef1f6c89e33ea7c5e5bb0222ea593b8a"}, - {file = "GitPython-3.1.20.tar.gz", hash = "sha256:df0e072a200703a65387b0cfdf0466e3bab729c0458cf6b7349d0e9877636519"}, + {file = "GitPython-3.1.18-py3-none-any.whl", hash = "sha256:fce760879cd2aebd2991b3542876dc5c4a909b30c9d69dfc488e504a8db37ee8"}, + {file = "GitPython-3.1.18.tar.gz", hash = "sha256:b838a895977b45ab6f0cc926a9045c8d1c44e2b653c1fcc39fe91f42c6e8f05b"}, ] graphene = [ {file = "graphene-2.1.9-py2.py3-none-any.whl", hash = "sha256:3d446eb1237c551052bc31155cf1a3a607053e4f58c9172b83a1b597beaa0868"}, @@ -2426,6 +2519,9 @@ graphene-django = [ {file = "graphene-django-2.15.0.tar.gz", hash = 
"sha256:b78c9b05bc899016b9cc5bf13faa1f37fe1faa8c5407552c6ddd1a28f46fc31a"}, {file = "graphene_django-2.15.0-py2.py3-none-any.whl", hash = "sha256:02671d195f0c09c8649acff2a8f4ad4f297d0f7d98ea6e6cdf034b81bab92880"}, ] +graphene-django-optimizer = [ + {file = "graphene-django-optimizer-0.8.0.tar.gz", hash = "sha256:79269880d59d0a35d41751ddcb419220c4ad3871960416371119f447cb2e1a77"}, +] graphql-core = [ {file = "graphql-core-2.3.2.tar.gz", hash = "sha256:aac46a9ac524c9855910c14c48fc5d60474def7f99fd10245e76608eba7af746"}, {file = "graphql_core-2.3.2-py2.py3-none-any.whl", hash = "sha256:44c9bac4514e5e30c5a595fac8e3c76c1975cae14db215e8174c7fe995825bad"}, @@ -2468,8 +2564,8 @@ immutables = [ {file = "immutables-0.16.tar.gz", hash = "sha256:d67e86859598eed0d926562da33325dac7767b7b1eff84e232c22abea19f4360"}, ] importlib-metadata = [ - {file = "importlib_metadata-3.4.0-py3-none-any.whl", hash = "sha256:ace61d5fc652dc280e7b6b4ff732a9c2d40db2c0f92bc6cb74e07b73d53a1771"}, - {file = "importlib_metadata-3.4.0.tar.gz", hash = "sha256:fa5daa4477a7414ae34e95942e4dd07f62adf589143c875c133c1e53c4eff38d"}, + {file = "importlib_metadata-4.4.0-py3-none-any.whl", hash = "sha256:960d52ba7c21377c990412aca380bf3642d734c2eaab78a2c39319f67c6a5786"}, + {file = "importlib_metadata-4.4.0.tar.gz", hash = "sha256:e592faad8de1bda9fe920cf41e15261e7131bcf266c30306eec00e8e225c1dd5"}, ] importlib-resources = [ {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"}, @@ -2551,98 +2647,113 @@ kombu = [ {file = "kombu-5.1.0.tar.gz", hash = "sha256:01481d99f4606f6939cdc9b637264ed353ee9e3e4f62cfb582324142c41a572d"}, ] lazy-object-proxy = [ - {file = "lazy-object-proxy-1.6.0.tar.gz", hash = "sha256:489000d368377571c6f982fba6497f2aa13c6d1facc40660963da62f5c379726"}, - {file = "lazy_object_proxy-1.6.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:c6938967f8528b3668622a9ed3b31d145fab161a32f5891ea7b84f6b790be05b"}, - {file = "lazy_object_proxy-1.6.0-cp27-cp27m-win32.whl", hash = "sha256:ebfd274dcd5133e0afae738e6d9da4323c3eb021b3e13052d8cbd0e457b1256e"}, - {file = "lazy_object_proxy-1.6.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ed361bb83436f117f9917d282a456f9e5009ea12fd6de8742d1a4752c3017e93"}, - {file = "lazy_object_proxy-1.6.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d900d949b707778696fdf01036f58c9876a0d8bfe116e8d220cfd4b15f14e741"}, - {file = "lazy_object_proxy-1.6.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5743a5ab42ae40caa8421b320ebf3a998f89c85cdc8376d6b2e00bd12bd1b587"}, - {file = "lazy_object_proxy-1.6.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:bf34e368e8dd976423396555078def5cfc3039ebc6fc06d1ae2c5a65eebbcde4"}, - {file = "lazy_object_proxy-1.6.0-cp36-cp36m-win32.whl", hash = "sha256:b579f8acbf2bdd9ea200b1d5dea36abd93cabf56cf626ab9c744a432e15c815f"}, - {file = "lazy_object_proxy-1.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:4f60460e9f1eb632584c9685bccea152f4ac2130e299784dbaf9fae9f49891b3"}, - {file = "lazy_object_proxy-1.6.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d7124f52f3bd259f510651450e18e0fd081ed82f3c08541dffc7b94b883aa981"}, - {file = "lazy_object_proxy-1.6.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:22ddd618cefe54305df49e4c069fa65715be4ad0e78e8d252a33debf00f6ede2"}, - {file = "lazy_object_proxy-1.6.0-cp37-cp37m-win32.whl", hash = "sha256:9d397bf41caad3f489e10774667310d73cb9c4258e9aed94b9ec734b34b495fd"}, - {file = "lazy_object_proxy-1.6.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:24a5045889cc2729033b3e604d496c2b6f588c754f7a62027ad4437a7ecc4837"}, - {file = "lazy_object_proxy-1.6.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:17e0967ba374fc24141738c69736da90e94419338fd4c7c7bef01ee26b339653"}, - {file = "lazy_object_proxy-1.6.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:410283732af311b51b837894fa2f24f2c0039aa7f220135192b38fcc42bd43d3"}, - {file = "lazy_object_proxy-1.6.0-cp38-cp38-win32.whl", hash = "sha256:85fb7608121fd5621cc4377a8961d0b32ccf84a7285b4f1d21988b2eae2868e8"}, - {file = "lazy_object_proxy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:d1c2676e3d840852a2de7c7d5d76407c772927addff8d742b9808fe0afccebdf"}, - {file = "lazy_object_proxy-1.6.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:b865b01a2e7f96db0c5d12cfea590f98d8c5ba64ad222300d93ce6ff9138bcad"}, - {file = "lazy_object_proxy-1.6.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:4732c765372bd78a2d6b2150a6e99d00a78ec963375f236979c0626b97ed8e43"}, - {file = "lazy_object_proxy-1.6.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9698110e36e2df951c7c36b6729e96429c9c32b3331989ef19976592c5f3c77a"}, - {file = "lazy_object_proxy-1.6.0-cp39-cp39-win32.whl", hash = "sha256:1fee665d2638491f4d6e55bd483e15ef21f6c8c2095f235fef72601021e64f61"}, - {file = "lazy_object_proxy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:f5144c75445ae3ca2057faac03fda5a902eff196702b0a24daf1d6ce0650514b"}, + {file = "lazy-object-proxy-1.7.1.tar.gz", hash = "sha256:d609c75b986def706743cdebe5e47553f4a5a1da9c5ff66d76013ef396b5a8a4"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb8c5fd1684d60a9902c60ebe276da1f2281a318ca16c1d0a96db28f62e9166b"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a57d51ed2997e97f3b8e3500c984db50a554bb5db56c50b5dab1b41339b37e36"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd45683c3caddf83abbb1249b653a266e7069a09f486daa8863fb0e7496a9fdb"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8561da8b3dd22d696244d6d0d5330618c993a215070f473b699e00cf1f3f6443"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fccdf7c2c5821a8cbd0a9440a456f5050492f2270bd54e94360cac663398739b"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-win32.whl", hash = "sha256:898322f8d078f2654d275124a8dd19b079080ae977033b713f677afcfc88e2b9"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:85b232e791f2229a4f55840ed54706110c80c0a210d076eee093f2b2e33e1bfd"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:46ff647e76f106bb444b4533bb4153c7370cdf52efc62ccfc1a28bdb3cc95442"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12f3bb77efe1367b2515f8cb4790a11cffae889148ad33adad07b9b55e0ab22c"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c19814163728941bb871240d45c4c30d33b8a2e85972c44d4e63dd7107faba44"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:e40f2013d96d30217a51eeb1db28c9ac41e9d0ee915ef9d00da639c5b63f01a1"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:2052837718516a94940867e16b1bb10edb069ab475c3ad84fd1e1a6dd2c0fcfc"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-win32.whl", hash = "sha256:6a24357267aa976abab660b1d47a34aaf07259a0c3859a34e536f1ee6e76b5bb"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-win_amd64.whl", hash = "sha256:6aff3fe5de0831867092e017cf67e2750c6a1c7d88d84d2481bd84a2e019ec35"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6a6e94c7b02641d1311228a102607ecd576f70734dc3d5e22610111aeacba8a0"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ce15276a1a14549d7e81c243b887293904ad2d94ad767f42df91e75fd7b5b6"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e368b7f7eac182a59ff1f81d5f3802161932a41dc1b1cc45c1f757dc876b5d2c"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6ecbb350991d6434e1388bee761ece3260e5228952b1f0c46ffc800eb313ff42"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:553b0f0d8dbf21890dd66edd771f9b1b5f51bd912fa5f26de4449bfc5af5e029"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-win32.whl", hash = "sha256:c7a683c37a8a24f6428c28c561c80d5f4fd316ddcf0c7cab999b15ab3f5c5c69"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:df2631f9d67259dc9620d831384ed7732a198eb434eadf69aea95ad18c587a28"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:07fa44286cda977bd4803b656ffc1c9b7e3bc7dff7d34263446aec8f8c96f88a"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4dca6244e4121c74cc20542c2ca39e5c4a5027c81d112bfb893cf0790f96f57e"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91ba172fc5b03978764d1df5144b4ba4ab13290d7bab7a50f12d8117f8630c38"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:043651b6cb706eee4f91854da4a089816a6606c1428fd391573ef8cb642ae4f7"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b9e89b87c707dd769c4ea91f7a31538888aad05c116a59820f28d59b3ebfe25a"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-win32.whl", hash = "sha256:9d166602b525bf54ac994cf833c385bfcc341b364e3ee71e3bf5a1336e677b55"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:8f3953eb575b45480db6568306893f0bd9d8dfeeebd46812aa09ca9579595148"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dd7ed7429dbb6c494aa9bc4e09d94b778a3579be699f9d67da7e6804c422d3de"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70ed0c2b380eb6248abdef3cd425fc52f0abd92d2b07ce26359fcbc399f636ad"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7096a5e0c1115ec82641afbdd70451a144558ea5cf564a896294e346eb611be1"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f769457a639403073968d118bc70110e7dce294688009f5c24ab78800ae56dc8"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:39b0e26725c5023757fc1ab2a89ef9d7ab23b84f9251e28f9cc114d5b59c1b09"}, + {file = 
"lazy_object_proxy-1.7.1-cp39-cp39-win32.whl", hash = "sha256:2130db8ed69a48a3440103d4a520b89d8a9405f1b06e2cc81640509e8bf6548f"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:677ea950bef409b47e51e733283544ac3d660b709cfce7b187f5ace137960d61"}, + {file = "lazy_object_proxy-1.7.1-pp37.pp38-none-any.whl", hash = "sha256:d66906d5785da8e0be7360912e99c9188b70f52c422f9fc18223347235691a84"}, ] loguru = [ {file = "loguru-0.5.3-py3-none-any.whl", hash = "sha256:f8087ac396b5ee5f67c963b495d615ebbceac2796379599820e324419d53667c"}, {file = "loguru-0.5.3.tar.gz", hash = "sha256:b28e72ac7a98be3d28ad28570299a393dfcd32e5e3f6a353dec94675767b6319"}, ] lxml = [ - {file = "lxml-4.6.4-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bbf2dc330bd44bfc0254ab37677ec60f7c7ecea55ad8ba1b8b2ea7bf20c265f5"}, - {file = "lxml-4.6.4-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b667c51682fe9b9788c69465956baa8b6999531876ccedcafc895c74ad716cd8"}, - {file = "lxml-4.6.4-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:72e730d33fe2e302fd07285f14624fca5e5e2fb2bb4fb2c3941e318c41c443d1"}, - {file = "lxml-4.6.4-cp27-cp27m-win32.whl", hash = "sha256:433df8c7dde0f9e41cbf4f36b0829d50a378116ef5e962ba3881f2f5f025c7be"}, - {file = "lxml-4.6.4-cp27-cp27m-win_amd64.whl", hash = "sha256:35752ee40f7bbf6adc9ff4e1f4b84794a3593736dcce80db32e3c2aa85e294ac"}, - {file = "lxml-4.6.4-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ff5bb2a198ea67403bb6818705e9a4f90e0313f2215428ec51001ce56d939fb"}, - {file = "lxml-4.6.4-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9b87727561c1150c0cc91c5d9d389448b37a7d15f0ba939ed3d1acb2f11bf6c5"}, - {file = "lxml-4.6.4-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:45fdb2899c755138722797161547a40b3e2a06feda620cc41195ee7e97806d81"}, - {file = "lxml-4.6.4-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:38b9de0de3aa689fe9fb9877ae1be1e83b8cf9621f7e62049d0436b9ecf4ad64"}, - {file = "lxml-4.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:662523cd2a0246740225c7e32531f2e766544122e58bee70e700a024cfc0cf81"}, - {file = "lxml-4.6.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:4aa349c5567651f34d4eaae7de6ed5b523f6d70a288f9c6fbac22d13a0784e04"}, - {file = "lxml-4.6.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:08eb9200d88b376a8ed5e50f1dc1d1a45b49305169674002a3b5929943390591"}, - {file = "lxml-4.6.4-cp310-cp310-win32.whl", hash = "sha256:bdc224f216ead849e902151112efef6e96c41ee1322e15d4e5f7c8a826929aee"}, - {file = "lxml-4.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:ab6db93a2b6b66cbf62b4e4a7135f476e708e8c5c990d186584142c77d7f975a"}, - {file = "lxml-4.6.4-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:50790313df028aa05cf22be9a8da033b86c42fa32523e4fd944827b482b17bf0"}, - {file = "lxml-4.6.4-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6764998345552b1dfc9326a932d2bad6367c6b37a176bb73ada6b9486bf602f7"}, - {file = "lxml-4.6.4-cp35-cp35m-win32.whl", hash = "sha256:543b239b191bb3b6d9bef5f09f1fb2be5b7eb09ab4d386aa655e4d53fbe9ff47"}, - {file = "lxml-4.6.4-cp35-cp35m-win_amd64.whl", hash = "sha256:a75c1ad05eedb1a3ff2a34a52a4f0836cfaa892e12796ba39a7732c82701eff4"}, - {file = "lxml-4.6.4-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:47e955112ce64241fdb357acf0216081f9f3255b3ac9c502ca4b3323ec1ca558"}, - 
{file = "lxml-4.6.4-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:20d7c8d90d449c6a353b15ee0459abae8395dbe59ad01e406ccbf30cd81c6f98"}, - {file = "lxml-4.6.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:240db6f3228d26e3c6f4fad914b9ddaaf8707254e8b3efd564dc680c8ec3c264"}, - {file = "lxml-4.6.4-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:351482da8dd028834028537f08724b1de22d40dcf3bb723b469446564f409074"}, - {file = "lxml-4.6.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e678a643177c0e5ec947b645fa7bc84260dfb9b6bf8fb1fdd83008dfc2ca5928"}, - {file = "lxml-4.6.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:15d0381feb56f08f78c5cc4fc385ddfe0bde1456e37f54a9322833371aec4060"}, - {file = "lxml-4.6.4-cp36-cp36m-win32.whl", hash = "sha256:4ba74afe5ee5cb5e28d83b513a6e8f0875fda1dc1a9aea42cc0065f029160d2a"}, - {file = "lxml-4.6.4-cp36-cp36m-win_amd64.whl", hash = "sha256:9c91a73971a922c13070fd8fa5a114c858251791ba2122a941e6aa781c713e44"}, - {file = "lxml-4.6.4-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:6020c70ff695106bf80651953a23e37718ef1fee9abd060dcad8e32ab2dc13f3"}, - {file = "lxml-4.6.4-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f5dd358536b8a964bf6bd48de038754c1609e72e5f17f5d21efe2dda17594dbf"}, - {file = "lxml-4.6.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:7ae7089d81fc502df4b217ad77f03c54039fe90dac0acbe70448d7e53bfbc57e"}, - {file = "lxml-4.6.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:80d10d53d3184837445ff8562021bdd37f57c4cadacbf9d8726cc16220a00d54"}, - {file = "lxml-4.6.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e95da348d57eb448d226a44b868ff2ca5786fbcbe417ac99ff62d0a7d724b9c7"}, - {file = "lxml-4.6.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ffd65cfa33fed01735c82aca640fde4cc63f0414775cba11e06f84fae2085a6e"}, - {file = "lxml-4.6.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:877666418598f6cb289546c77ff87590cfd212f903b522b0afa0b9fb73b3ccfb"}, - {file = "lxml-4.6.4-cp37-cp37m-win32.whl", hash = "sha256:e91d24623e747eeb2d8121f4a94c6a7ad27dc48e747e2dc95bfe88632bd028a2"}, - {file = "lxml-4.6.4-cp37-cp37m-win_amd64.whl", hash = "sha256:4ec9a80dd5704ecfde54319b6964368daf02848c8954d3bacb9b64d1c7659159"}, - {file = "lxml-4.6.4-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:2901625f4a878a055d275beedc20ba9cb359cefc4386a967222fee29eb236038"}, - {file = "lxml-4.6.4-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b567178a74a2261345890eac66fbf394692a6e002709d329f28a673ca6042473"}, - {file = "lxml-4.6.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4717123f7c11c81e0da69989e5a64079c3f402b0efeb4c6241db6c369d657bd8"}, - {file = "lxml-4.6.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:cf201bf5594d1aab139fe53e3fca457e4f8204a5bbd65d48ab3b82a16f517868"}, - {file = "lxml-4.6.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a77a3470ba37e11872c75ca95baf9b3312133a3d5a5dc720803b23098c653976"}, - {file = "lxml-4.6.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:619c6d2b552bba00491e96c0518aad94002651c108a0f7364ff2d7798812c00e"}, - {file = 
"lxml-4.6.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:601f0ab75538b280aaf1e720eb9d68d4fa104ac274e1e9e6971df488f4dcdb0f"}, - {file = "lxml-4.6.4-cp38-cp38-win32.whl", hash = "sha256:75d3c5bbc0ddbad03bb68b9be638599f67e4b98ed3dcd0fec9f6f39e41ee96cb"}, - {file = "lxml-4.6.4-cp38-cp38-win_amd64.whl", hash = "sha256:4341d135f5660db10184963d9c3418c3e28d7f868aaf8b11a323ebf85813f7f4"}, - {file = "lxml-4.6.4-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:9db24803fa71e3305fe4a7812782b708da21a0b774b130dd1860cf40a6d7a3ee"}, - {file = "lxml-4.6.4-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:afd60230ad9d8bcba005945ec3a343722f09e0b7f8ae804246e5d2cfc6bd71a6"}, - {file = "lxml-4.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:0c15e1cd55055956e77b0732270f1c6005850696bc3ef3e03d01e78af84eaa42"}, - {file = "lxml-4.6.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6d422b3c729737d8a39279a25fa156c983a56458f8b2f97661ee6fb22b80b1d6"}, - {file = "lxml-4.6.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2eb90f6ec3c236ef2f1bb38aee7c0d23e77d423d395af6326e7cca637519a4cb"}, - {file = "lxml-4.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:51a0e5d243687596f46e24e464121d4b232ad772e2d1785b2a2c0eb413c285d4"}, - {file = "lxml-4.6.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d43bd68714049c84e297c005456a15ecdec818f7b5aa5868c8b0a865cfb78a44"}, - {file = "lxml-4.6.4-cp39-cp39-win32.whl", hash = "sha256:ee9e4b07b0eba4b6a521509e9e1877476729c1243246b6959de697ebea739643"}, - {file = "lxml-4.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:48eaac2991b3036175b42ee8d3c23f4cca13f2be8426bf29401a690ab58c88f4"}, - {file = "lxml-4.6.4-pp37-pypy37_pp73-macosx_10_14_x86_64.whl", hash = "sha256:2b06a91cf7b8acea7793006e4ae50646cef0fe35ce5acd4f5cb1c77eb228e4a1"}, - {file = "lxml-4.6.4-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:523f195948a1ba4f9f5b7294d83c6cd876547dc741820750a7e5e893a24bbe38"}, - {file = "lxml-4.6.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:b0ca0ada9d3bc18bd6f611bd001a28abdd49ab9698bd6d717f7f5394c8e94628"}, - {file = "lxml-4.6.4-pp38-pypy38_pp73-macosx_10_14_x86_64.whl", hash = "sha256:197b7cb7a753cf553a45115739afd8458464a28913da00f5c525063f94cd3f48"}, - {file = "lxml-4.6.4-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:6298f5b42a26581206ef63fffa97c754245d329414108707c525512a5197f2ba"}, - {file = "lxml-4.6.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0b12c95542f04d10cba46b3ff28ea52ea56995b78cf918f0b11b05e75812bb79"}, - {file = "lxml-4.6.4.tar.gz", hash = "sha256:daf9bd1fee31f1c7a5928b3e1059e09a8d683ea58fb3ffc773b6c88cb8d1399c"}, + {file = "lxml-4.7.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:d546431636edb1d6a608b348dd58cc9841b81f4116745857b6cb9f8dadb2725f"}, + {file = "lxml-4.7.1-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6308062534323f0d3edb4e702a0e26a76ca9e0e23ff99be5d82750772df32a9e"}, + {file = "lxml-4.7.1-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f76dbe44e31abf516114f6347a46fa4e7c2e8bceaa4b6f7ee3a0a03c8eba3c17"}, + {file = "lxml-4.7.1-cp27-cp27m-win32.whl", hash = 
"sha256:d5618d49de6ba63fe4510bdada62d06a8acfca0b4b5c904956c777d28382b419"}, + {file = "lxml-4.7.1-cp27-cp27m-win_amd64.whl", hash = "sha256:9393a05b126a7e187f3e38758255e0edf948a65b22c377414002d488221fdaa2"}, + {file = "lxml-4.7.1-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:50d3dba341f1e583265c1a808e897b4159208d814ab07530202b6036a4d86da5"}, + {file = "lxml-4.7.1-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:44f552e0da3c8ee3c28e2eb82b0b784200631687fc6a71277ea8ab0828780e7d"}, + {file = "lxml-4.7.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:e662c6266e3a275bdcb6bb049edc7cd77d0b0f7e119a53101d367c841afc66dc"}, + {file = "lxml-4.7.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4c093c571bc3da9ebcd484e001ba18b8452903cd428c0bc926d9b0141bcb710e"}, + {file = "lxml-4.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:3e26ad9bc48d610bf6cc76c506b9e5ad9360ed7a945d9be3b5b2c8535a0145e3"}, + {file = "lxml-4.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a5f623aeaa24f71fce3177d7fee875371345eb9102b355b882243e33e04b7175"}, + {file = "lxml-4.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7b5e2acefd33c259c4a2e157119c4373c8773cf6793e225006a1649672ab47a6"}, + {file = "lxml-4.7.1-cp310-cp310-win32.whl", hash = "sha256:67fa5f028e8a01e1d7944a9fb616d1d0510d5d38b0c41708310bd1bc45ae89f6"}, + {file = "lxml-4.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:b1d381f58fcc3e63fcc0ea4f0a38335163883267f77e4c6e22d7a30877218a0e"}, + {file = "lxml-4.7.1-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:38d9759733aa04fb1697d717bfabbedb21398046bd07734be7cccc3d19ea8675"}, + {file = "lxml-4.7.1-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:dfd0d464f3d86a1460683cd742306d1138b4e99b79094f4e07e1ca85ee267fe7"}, + {file = "lxml-4.7.1-cp35-cp35m-win32.whl", hash = "sha256:534e946bce61fd162af02bad7bfd2daec1521b71d27238869c23a672146c34a5"}, + {file = "lxml-4.7.1-cp35-cp35m-win_amd64.whl", hash = "sha256:6ec829058785d028f467be70cd195cd0aaf1a763e4d09822584ede8c9eaa4b03"}, + {file = "lxml-4.7.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:ade74f5e3a0fd17df5782896ddca7ddb998845a5f7cd4b0be771e1ffc3b9aa5b"}, + {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:41358bfd24425c1673f184d7c26c6ae91943fe51dfecc3603b5e08187b4bcc55"}, + {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6e56521538f19c4a6690f439fefed551f0b296bd785adc67c1777c348beb943d"}, + {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5b0f782f0e03555c55e37d93d7a57454efe7495dab33ba0ccd2dbe25fc50f05d"}, + {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:490712b91c65988012e866c411a40cc65b595929ececf75eeb4c79fcc3bc80a6"}, + {file = "lxml-4.7.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:34c22eb8c819d59cec4444d9eebe2e38b95d3dcdafe08965853f8799fd71161d"}, + {file = "lxml-4.7.1-cp36-cp36m-win32.whl", hash = "sha256:2a906c3890da6a63224d551c2967413b8790a6357a80bf6b257c9a7978c2c42d"}, + {file = "lxml-4.7.1-cp36-cp36m-win_amd64.whl", hash = "sha256:36b16fecb10246e599f178dd74f313cbdc9f41c56e77d52100d1361eed24f51a"}, + {file = "lxml-4.7.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = 
"sha256:a5edc58d631170de90e50adc2cc0248083541affef82f8cd93bea458e4d96db8"}, + {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:87c1b0496e8c87ec9db5383e30042357b4839b46c2d556abd49ec770ce2ad868"}, + {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:0a5f0e4747f31cff87d1eb32a6000bde1e603107f632ef4666be0dc065889c7a"}, + {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:bf6005708fc2e2c89a083f258b97709559a95f9a7a03e59f805dd23c93bc3986"}, + {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fc15874816b9320581133ddc2096b644582ab870cf6a6ed63684433e7af4b0d3"}, + {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0b5e96e25e70917b28a5391c2ed3ffc6156513d3db0e1476c5253fcd50f7a944"}, + {file = "lxml-4.7.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ec9027d0beb785a35aa9951d14e06d48cfbf876d8ff67519403a2522b181943b"}, + {file = "lxml-4.7.1-cp37-cp37m-win32.whl", hash = "sha256:9fbc0dee7ff5f15c4428775e6fa3ed20003140560ffa22b88326669d53b3c0f4"}, + {file = "lxml-4.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:1104a8d47967a414a436007c52f533e933e5d52574cab407b1e49a4e9b5ddbd1"}, + {file = "lxml-4.7.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:fc9fb11b65e7bc49f7f75aaba1b700f7181d95d4e151cf2f24d51bfd14410b77"}, + {file = "lxml-4.7.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:317bd63870b4d875af3c1be1b19202de34c32623609ec803b81c99193a788c1e"}, + {file = "lxml-4.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:610807cea990fd545b1559466971649e69302c8a9472cefe1d6d48a1dee97440"}, + {file = "lxml-4.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:09b738360af8cb2da275998a8bf79517a71225b0de41ab47339c2beebfff025f"}, + {file = "lxml-4.7.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6a2ab9d089324d77bb81745b01f4aeffe4094306d939e92ba5e71e9a6b99b71e"}, + {file = "lxml-4.7.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eed394099a7792834f0cb4a8f615319152b9d801444c1c9e1b1a2c36d2239f9e"}, + {file = "lxml-4.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:735e3b4ce9c0616e85f302f109bdc6e425ba1670a73f962c9f6b98a6d51b77c9"}, + {file = "lxml-4.7.1-cp38-cp38-win32.whl", hash = "sha256:772057fba283c095db8c8ecde4634717a35c47061d24f889468dc67190327bcd"}, + {file = "lxml-4.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:13dbb5c7e8f3b6a2cf6e10b0948cacb2f4c9eb05029fe31c60592d08ac63180d"}, + {file = "lxml-4.7.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:718d7208b9c2d86aaf0294d9381a6acb0158b5ff0f3515902751404e318e02c9"}, + {file = "lxml-4.7.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:5bee1b0cbfdb87686a7fb0e46f1d8bd34d52d6932c0723a86de1cc532b1aa489"}, + {file = "lxml-4.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e410cf3a2272d0a85526d700782a2fa92c1e304fdcc519ba74ac80b8297adf36"}, + {file = "lxml-4.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:585ea241ee4961dc18a95e2f5581dbc26285fcf330e007459688096f76be8c42"}, + {file = "lxml-4.7.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:a555e06566c6dc167fbcd0ad507ff05fd9328502aefc963cb0a0547cfe7f00db"}, + {file = "lxml-4.7.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:adaab25be351fff0d8a691c4f09153647804d09a87a4e4ea2c3f9fe9e8651851"}, + {file = "lxml-4.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:82d16a64236970cb93c8d63ad18c5b9f138a704331e4b916b2737ddfad14e0c4"}, + {file = "lxml-4.7.1-cp39-cp39-win32.whl", hash = "sha256:59e7da839a1238807226f7143c68a479dee09244d1b3cf8c134f2fce777d12d0"}, + {file = "lxml-4.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:a1bbc4efa99ed1310b5009ce7f3a1784698082ed2c1ef3895332f5df9b3b92c2"}, + {file = "lxml-4.7.1-pp37-pypy37_pp73-macosx_10_14_x86_64.whl", hash = "sha256:0607ff0988ad7e173e5ddf7bf55ee65534bd18a5461183c33e8e41a59e89edf4"}, + {file = "lxml-4.7.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:6c198bfc169419c09b85ab10cb0f572744e686f40d1e7f4ed09061284fc1303f"}, + {file = "lxml-4.7.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a58d78653ae422df6837dd4ca0036610b8cb4962b5cfdbd337b7b24de9e5f98a"}, + {file = "lxml-4.7.1-pp38-pypy38_pp73-macosx_10_14_x86_64.whl", hash = "sha256:e18281a7d80d76b66a9f9e68a98cf7e1d153182772400d9a9ce855264d7d0ce7"}, + {file = "lxml-4.7.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8e54945dd2eeb50925500957c7c579df3cd07c29db7810b83cf30495d79af267"}, + {file = "lxml-4.7.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:447d5009d6b5447b2f237395d0018901dcc673f7d9f82ba26c1b9f9c3b444b60"}, + {file = "lxml-4.7.1.tar.gz", hash = "sha256:a1613838aa6b89af4ba10a0f3a972836128801ed008078f8c1244e65958f1b24"}, ] markdown = [ - {file = "Markdown-3.3.4-py3-none-any.whl", hash = "sha256:96c3ba1261de2f7547b46a00ea8463832c921d3f9d6aba3f255a6f71386db20c"}, - {file = "Markdown-3.3.4.tar.gz", hash = "sha256:31b5b491868dcc87d6c24b7e3d19a0d730d59d3e46f4eea6430a321bed387a49"}, + {file = "Markdown-3.3.6-py3-none-any.whl", hash = "sha256:9923332318f843411e9932237530df53162e29dc7a4e2b91e35764583c46c9a3"}, + {file = "Markdown-3.3.6.tar.gz", hash = "sha256:76df8ae32294ec39dcf89340382882dfa12975f87f45c3ed1ecdb1e8cefc7006"}, ] markupsafe = [ {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, @@ -2755,8 +2866,8 @@ napalm = [ {file = "napalm-3.3.1.tar.gz", hash = "sha256:6fe46d5b4ca761a2bde8ea2bea3e0b808a907afc808c6c67178060eb323320e8"}, ] nautobot = [ - {file = "nautobot-1.1.4-py3-none-any.whl", hash = "sha256:355773e30aa2d7da187e610751691cb0bde5314933a14d45c3e140c6410d718f"}, - {file = "nautobot-1.1.4.tar.gz", hash = "sha256:a4feff233bf7255a1deadb025c023a96b97a18a3cf676af151ea02c2896b709c"}, + {file = "nautobot-1.2.0-py3-none-any.whl", hash = "sha256:0c4197c0a476cd56d02dfc3ff051d779e3a7b39ed4c1f9cc41f659ac048e2cbb"}, + {file = "nautobot-1.2.0.tar.gz", hash = "sha256:7c1816ef3b55f4dab9156213c0e93dfb2c8b1c2c7c3b38a3cc2320861d80578a"}, ] nautobot-plugin-nornir = [ {file = "nautobot-plugin-nornir-0.9.7.tar.gz", hash = "sha256:e71cdbce4a66e3f801cf4931034c5bc9fe8a4e4ca606a80286c286a480f04cd0"}, @@ -2774,8 +2885,8 @@ netmiko = [ {file = "netmiko-3.4.0.tar.gz", hash = "sha256:acadb9dd97864ee848e2032f1f0e301c7b31e7a4153757d98f5c8ba1b9614993"}, ] netutils = [ - {file = "netutils-0.2.5-py3-none-any.whl", hash = 
"sha256:6715b0e1175051f5db7685bf057ede68c7dafc56cf705a1ca3896e24980b22ce"}, - {file = "netutils-0.2.5.tar.gz", hash = "sha256:bdfd8e454c4aeacf59c1537bfd434e1d944ce69032fed533f0171c2532c91751"}, + {file = "netutils-1.0.0-py3-none-any.whl", hash = "sha256:f6e695dc761f41c68d3b2b9763f6ac3bc636d8b3c70c9886dae2655b2eab5c2b"}, + {file = "netutils-1.0.0.tar.gz", hash = "sha256:ead1d927374a76a9ff78867b5f72b66cd26eaa9ec9e8d00e12e8085694a0275a"}, ] nornir = [ {file = "nornir-3.1.1-py3-none-any.whl", hash = "sha256:217199f923c810f4a54dec8d440eb08682c8a4ea4746325bd3067dca2e32cf9f"}, @@ -2790,8 +2901,8 @@ nornir-napalm = [ {file = "nornir_napalm-0.1.2.tar.gz", hash = "sha256:be7808a990242987500a65701edb626197c5d0b87f35d9eb5da7ce7e4d60fdd5"}, ] nornir-nautobot = [ - {file = "nornir-nautobot-2.2.0.tar.gz", hash = "sha256:8bc5b7185c9dfbf31fa2177ec3e4bd93731aec63a11156d9b346c278f8f56abb"}, - {file = "nornir_nautobot-2.2.0-py3-none-any.whl", hash = "sha256:3f3916330a8c8de773b70cd3fe14da0f4809c9298be221c94168bd11a5366591"}, + {file = "nornir-nautobot-2.2.1.tar.gz", hash = "sha256:2e38ad87374ebc7f065cd404a4ead79142df5e2246ef3e804eea8951e4add89e"}, + {file = "nornir_nautobot-2.2.1-py3-none-any.whl", hash = "sha256:d78fcd8c2475179341ad4a0408a1376181b063678641173bae2af35948fa6d60"}, ] nornir-netmiko = [ {file = "nornir_netmiko-0.1.1-py3-none-any.whl", hash = "sha256:c6eadb81f6f3b2f0c27bae151cc62673303f9d085ec3c773ecdc98f20ef30f91"}, @@ -2853,8 +2964,8 @@ packaging = [ {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] paramiko = [ - {file = "paramiko-2.8.0-py2.py3-none-any.whl", hash = "sha256:def3ec612399bab4e9f5eb66b0ae5983980db9dd9120d9e9c6ea3ff673865d1c"}, - {file = "paramiko-2.8.0.tar.gz", hash = "sha256:e673b10ee0f1c80d46182d3af7751d033d9b573dd7054d2d0aa46be186c3c1d2"}, + {file = "paramiko-2.8.1-py2.py3-none-any.whl", hash = "sha256:7b5910f5815a00405af55da7abcc8a9e0d9657f57fcdd9a89894fdbba1c6b8a8"}, + {file = "paramiko-2.8.1.tar.gz", hash = "sha256:85b1245054e5d7592b9088cc6d08da22445417912d3a3e48138675c7a8616438"}, ] passlib = [ {file = "passlib-1.7.4-py2.py3-none-any.whl", hash = "sha256:aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1"}, @@ -2976,8 +3087,8 @@ psycopg2-binary = [ {file = "psycopg2_binary-2.8.6-cp39-cp39-win_amd64.whl", hash = "sha256:15978a1fbd225583dd8cdaf37e67ccc278b5abecb4caf6b2d6b8e2b948e953f6"}, ] pycodestyle = [ - {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, - {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, + {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, + {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, ] pycparser = [ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, @@ -3023,8 +3134,8 @@ pyeapi = [ {file = "pyeapi-0.8.4.tar.gz", hash = "sha256:c33ad1eadd8ebac75f63488df9412081ce0b024c9e1da12a37196a5c60427c54"}, ] pyflakes = [ - {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, - {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, + {file = 
"pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, + {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, ] pyjwt = [ {file = "PyJWT-2.3.0-py3-none-any.whl", hash = "sha256:e0c4bb8d9f0af0c7f5b1ec4c5036309617d03d56932877f2f7a0beeb5318322f"}, @@ -3063,8 +3174,8 @@ pynacl = [ {file = "PyNaCl-1.4.0.tar.gz", hash = "sha256:54e9a2c849c742006516ad56a88f5c74bf2ce92c9f67435187c3c5953b346505"}, ] pynautobot = [ - {file = "pynautobot-1.0.3-py3-none-any.whl", hash = "sha256:8ef66962ad8143ff2c9b01b1f66165f18d52302e97d18c32b47a9b67e0b7fe7c"}, - {file = "pynautobot-1.0.3.tar.gz", hash = "sha256:01fc6cf32e6ad520c7847d304dde356890960a4d95de48af6a1ce1f43fac6a65"}, + {file = "pynautobot-1.0.4-py3-none-any.whl", hash = "sha256:e30b667cd0e5df91c93453234dc5920b7633ed1c9dcce1bb9d507072c4ae23ed"}, + {file = "pynautobot-1.0.4.tar.gz", hash = "sha256:193b5989f42254eff71655623fa2255a8c97920f28faab14360566816e29d78a"}, ] pyparsing = [ {file = "pyparsing-3.0.6-py3-none-any.whl", hash = "sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4"}, @@ -3097,6 +3208,9 @@ pyserial = [ {file = "pyserial-3.5-py2.py3-none-any.whl", hash = "sha256:c4451db6ba391ca6ca299fb3ec7bae67a5c55dde170964c7a14ceefec02f2cf0"}, {file = "pyserial-3.5.tar.gz", hash = "sha256:3c77e014170dfffbd816e6ffc205e9842efb10be9f58ec16d3e8675b4925cddb"}, ] +python-crontab = [ + {file = "python-crontab-2.6.0.tar.gz", hash = "sha256:1e35ed7a3cdc3100545b43e196d34754e6551e7f95e4caebbe0e1c0ca41c2f1b"}, +] python-dateutil = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, @@ -3172,82 +3286,6 @@ redis = [ {file = "redis-4.0.2-py3-none-any.whl", hash = "sha256:c8481cf414474e3497ec7971a1ba9b998c8efad0f0d289a009a5bbef040894f9"}, {file = "redis-4.0.2.tar.gz", hash = "sha256:ccf692811f2c1fc7a92b466aa2599e4a6d2d73d5f736a2c70be600657c0da34a"}, ] -regex = [ - {file = "regex-2021.11.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9345b6f7ee578bad8e475129ed40123d265464c4cfead6c261fd60fc9de00bcf"}, - {file = "regex-2021.11.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:416c5f1a188c91e3eb41e9c8787288e707f7d2ebe66e0a6563af280d9b68478f"}, - {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0538c43565ee6e703d3a7c3bdfe4037a5209250e8502c98f20fea6f5fdf2965"}, - {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee1227cf08b6716c85504aebc49ac827eb88fcc6e51564f010f11a406c0a667"}, - {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6650f16365f1924d6014d2ea770bde8555b4a39dc9576abb95e3cd1ff0263b36"}, - {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30ab804ea73972049b7a2a5c62d97687d69b5a60a67adca07eb73a0ddbc9e29f"}, - {file = "regex-2021.11.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68a067c11463de2a37157930d8b153005085e42bcb7ad9ca562d77ba7d1404e0"}, - {file = "regex-2021.11.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:162abfd74e88001d20cb73ceaffbfe601469923e875caf9118333b1a4aaafdc4"}, - {file = "regex-2021.11.10-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9ed0b1e5e0759d6b7f8e2f143894b2a7f3edd313f38cf44e1e15d360e11749b"}, - {file = "regex-2021.11.10-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:473e67837f786404570eae33c3b64a4b9635ae9f00145250851a1292f484c063"}, - {file = "regex-2021.11.10-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2fee3ed82a011184807d2127f1733b4f6b2ff6ec7151d83ef3477f3b96a13d03"}, - {file = "regex-2021.11.10-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d5fd67df77bab0d3f4ea1d7afca9ef15c2ee35dfb348c7b57ffb9782a6e4db6e"}, - {file = "regex-2021.11.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5d408a642a5484b9b4d11dea15a489ea0928c7e410c7525cd892f4d04f2f617b"}, - {file = "regex-2021.11.10-cp310-cp310-win32.whl", hash = "sha256:98ba568e8ae26beb726aeea2273053c717641933836568c2a0278a84987b2a1a"}, - {file = "regex-2021.11.10-cp310-cp310-win_amd64.whl", hash = "sha256:780b48456a0f0ba4d390e8b5f7c661fdd218934388cde1a974010a965e200e12"}, - {file = "regex-2021.11.10-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:dba70f30fd81f8ce6d32ddeef37d91c8948e5d5a4c63242d16a2b2df8143aafc"}, - {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1f54b9b4b6c53369f40028d2dd07a8c374583417ee6ec0ea304e710a20f80a0"}, - {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fbb9dc00e39f3e6c0ef48edee202f9520dafb233e8b51b06b8428cfcb92abd30"}, - {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666abff54e474d28ff42756d94544cdfd42e2ee97065857413b72e8a2d6a6345"}, - {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5537f71b6d646f7f5f340562ec4c77b6e1c915f8baae822ea0b7e46c1f09b733"}, - {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2e07c6a26ed4bea91b897ee2b0835c21716d9a469a96c3e878dc5f8c55bb23"}, - {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ca5f18a75e1256ce07494e245cdb146f5a9267d3c702ebf9b65c7f8bd843431e"}, - {file = "regex-2021.11.10-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:74cbeac0451f27d4f50e6e8a8f3a52ca074b5e2da9f7b505c4201a57a8ed6286"}, - {file = "regex-2021.11.10-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:3598893bde43091ee5ca0a6ad20f08a0435e93a69255eeb5f81b85e81e329264"}, - {file = "regex-2021.11.10-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:50a7ddf3d131dc5633dccdb51417e2d1910d25cbcf842115a3a5893509140a3a"}, - {file = "regex-2021.11.10-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:61600a7ca4bcf78a96a68a27c2ae9389763b5b94b63943d5158f2a377e09d29a"}, - {file = "regex-2021.11.10-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:563d5f9354e15e048465061509403f68424fef37d5add3064038c2511c8f5e00"}, - {file = "regex-2021.11.10-cp36-cp36m-win32.whl", hash = "sha256:93a5051fcf5fad72de73b96f07d30bc29665697fb8ecdfbc474f3452c78adcf4"}, - {file = "regex-2021.11.10-cp36-cp36m-win_amd64.whl", hash = "sha256:b483c9d00a565633c87abd0aaf27eb5016de23fed952e054ecc19ce32f6a9e7e"}, - {file = "regex-2021.11.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fff55f3ce50a3ff63ec8e2a8d3dd924f1941b250b0aac3d3d42b687eeff07a8e"}, - {file = 
"regex-2021.11.10-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e32d2a2b02ccbef10145df9135751abea1f9f076e67a4e261b05f24b94219e36"}, - {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53db2c6be8a2710b359bfd3d3aa17ba38f8aa72a82309a12ae99d3c0c3dcd74d"}, - {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2207ae4f64ad3af399e2d30dde66f0b36ae5c3129b52885f1bffc2f05ec505c8"}, - {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5ca078bb666c4a9d1287a379fe617a6dccd18c3e8a7e6c7e1eb8974330c626a"}, - {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd33eb9bdcfbabab3459c9ee651d94c842bc8a05fabc95edf4ee0c15a072495e"}, - {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05b7d6d7e64efe309972adab77fc2af8907bb93217ec60aa9fe12a0dad35874f"}, - {file = "regex-2021.11.10-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:42b50fa6666b0d50c30a990527127334d6b96dd969011e843e726a64011485da"}, - {file = "regex-2021.11.10-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6e1d2cc79e8dae442b3fa4a26c5794428b98f81389af90623ffcc650ce9f6732"}, - {file = "regex-2021.11.10-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:0416f7399e918c4b0e074a0f66e5191077ee2ca32a0f99d4c187a62beb47aa05"}, - {file = "regex-2021.11.10-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:ce298e3d0c65bd03fa65ffcc6db0e2b578e8f626d468db64fdf8457731052942"}, - {file = "regex-2021.11.10-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dc07f021ee80510f3cd3af2cad5b6a3b3a10b057521d9e6aaeb621730d320c5a"}, - {file = "regex-2021.11.10-cp37-cp37m-win32.whl", hash = "sha256:e71255ba42567d34a13c03968736c5d39bb4a97ce98188fafb27ce981115beec"}, - {file = "regex-2021.11.10-cp37-cp37m-win_amd64.whl", hash = "sha256:07856afef5ffcc052e7eccf3213317fbb94e4a5cd8177a2caa69c980657b3cb4"}, - {file = "regex-2021.11.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba05430e819e58544e840a68b03b28b6d328aff2e41579037e8bab7653b37d83"}, - {file = "regex-2021.11.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7f301b11b9d214f83ddaf689181051e7f48905568b0c7017c04c06dfd065e244"}, - {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aaa4e0705ef2b73dd8e36eeb4c868f80f8393f5f4d855e94025ce7ad8525f50"}, - {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:788aef3549f1924d5c38263104dae7395bf020a42776d5ec5ea2b0d3d85d6646"}, - {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f8af619e3be812a2059b212064ea7a640aff0568d972cd1b9e920837469eb3cb"}, - {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85bfa6a5413be0ee6c5c4a663668a2cad2cbecdee367630d097d7823041bdeec"}, - {file = "regex-2021.11.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f23222527b307970e383433daec128d769ff778d9b29343fb3496472dc20dabe"}, - {file = "regex-2021.11.10-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:da1a90c1ddb7531b1d5ff1e171b4ee61f6345119be7351104b67ff413843fe94"}, - {file = "regex-2021.11.10-cp38-cp38-musllinux_1_1_aarch64.whl", 
hash = "sha256:f5be7805e53dafe94d295399cfbe5227f39995a997f4fd8539bf3cbdc8f47ca8"}, - {file = "regex-2021.11.10-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a955b747d620a50408b7fdf948e04359d6e762ff8a85f5775d907ceced715129"}, - {file = "regex-2021.11.10-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:139a23d1f5d30db2cc6c7fd9c6d6497872a672db22c4ae1910be22d4f4b2068a"}, - {file = "regex-2021.11.10-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ca49e1ab99593438b204e00f3970e7a5f70d045267051dfa6b5f4304fcfa1dbf"}, - {file = "regex-2021.11.10-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:96fc32c16ea6d60d3ca7f63397bff5c75c5a562f7db6dec7d412f7c4d2e78ec0"}, - {file = "regex-2021.11.10-cp38-cp38-win32.whl", hash = "sha256:0617383e2fe465732af4509e61648b77cbe3aee68b6ac8c0b6fe934db90be5cc"}, - {file = "regex-2021.11.10-cp38-cp38-win_amd64.whl", hash = "sha256:a3feefd5e95871872673b08636f96b61ebef62971eab044f5124fb4dea39919d"}, - {file = "regex-2021.11.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7f325be2804246a75a4f45c72d4ce80d2443ab815063cdf70ee8fb2ca59ee1b"}, - {file = "regex-2021.11.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:537ca6a3586931b16a85ac38c08cc48f10fc870a5b25e51794c74df843e9966d"}, - {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef2afb0fd1747f33f1ee3e209bce1ed582d1896b240ccc5e2697e3275f037c7"}, - {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:432bd15d40ed835a51617521d60d0125867f7b88acf653e4ed994a1f8e4995dc"}, - {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b43c2b8a330a490daaef5a47ab114935002b13b3f9dc5da56d5322ff218eeadb"}, - {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:962b9a917dd7ceacbe5cd424556914cb0d636001e393b43dc886ba31d2a1e449"}, - {file = "regex-2021.11.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa8c626d6441e2d04b6ee703ef2d1e17608ad44c7cb75258c09dd42bacdfc64b"}, - {file = "regex-2021.11.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3c5fb32cc6077abad3bbf0323067636d93307c9fa93e072771cf9a64d1c0f3ef"}, - {file = "regex-2021.11.10-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cd410a1cbb2d297c67d8521759ab2ee3f1d66206d2e4328502a487589a2cb21b"}, - {file = "regex-2021.11.10-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e6096b0688e6e14af6a1b10eaad86b4ff17935c49aa774eac7c95a57a4e8c296"}, - {file = "regex-2021.11.10-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:529801a0d58809b60b3531ee804d3e3be4b412c94b5d267daa3de7fadef00f49"}, - {file = "regex-2021.11.10-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0f594b96fe2e0821d026365f72ac7b4f0b487487fb3d4aaf10dd9d97d88a9737"}, - {file = "regex-2021.11.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2409b5c9cef7054dde93a9803156b411b677affc84fca69e908b1cb2c540025d"}, - {file = "regex-2021.11.10-cp39-cp39-win32.whl", hash = "sha256:3b5df18db1fccd66de15aa59c41e4f853b5df7550723d26aa6cb7f40e5d9da5a"}, - {file = "regex-2021.11.10-cp39-cp39-win_amd64.whl", hash = "sha256:83ee89483672b11f8952b158640d0c0ff02dc43d9cb1b70c1564b49abe92ce29"}, - {file = "regex-2021.11.10.tar.gz", hash = "sha256:f341ee2df0999bfdf7a95e448075effe0db212a59387de1a70690e4acb03d4c6"}, -] requests = [ {file = "requests-2.26.0-py2.py3-none-any.whl", hash = 
"sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, @@ -3258,14 +3296,18 @@ requests-oauthlib = [ {file = "requests_oauthlib-1.3.0-py3.7.egg", hash = "sha256:fa6c47b933f01060936d87ae9327fead68768b69c6c9ea2109c48be30f2d4dbc"}, ] rq = [ - {file = "rq-1.10.0-py2.py3-none-any.whl", hash = "sha256:92950a3e60863de48dd1800882939bbaf089a37497ebf9f2ecf7c9fd0a4c4a95"}, - {file = "rq-1.10.0.tar.gz", hash = "sha256:be09ec43fae9a75a4d26ea3cd520e5fa3ea2ea8cf481be33e6ec9416f0369cac"}, + {file = "rq-1.10.1-py2.py3-none-any.whl", hash = "sha256:92f4cf38b2364c1697b541e77c0fe62b7e5242fa864324f262be126ee2a07e3a"}, + {file = "rq-1.10.1.tar.gz", hash = "sha256:62d06b44c3acfa5d1933c5a4ec3fbc2484144a8af60e318d0b8447c5236271e2"}, ] "ruamel.yaml" = [ {file = "ruamel.yaml-0.16.13-py2.py3-none-any.whl", hash = "sha256:64b06e7873eb8e1125525ecef7345447d786368cadca92a7cd9b59eae62e95a3"}, {file = "ruamel.yaml-0.16.13.tar.gz", hash = "sha256:bb48c514222702878759a05af96f4b7ecdba9b33cd4efcf25c86b882cef3a942"}, ] "ruamel.yaml.clib" = [ + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6e7be2c5bcb297f5b82fee9c665eb2eb7001d1050deaba8471842979293a80b0"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:221eca6f35076c6ae472a531afa1c223b9c29377e62936f61bc8e6e8bdc5f9e7"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win32.whl", hash = "sha256:1070ba9dd7f9370d0513d649420c3b362ac2d687fe78c6e888f5b12bf8bc7bee"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:77df077d32921ad46f34816a9a16e6356d8100374579bc35e15bab5d4e9377de"}, {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:cfdb9389d888c5b74af297e51ce357b800dd844898af9d4a547ffc143fa56751"}, {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7b2927e92feb51d830f531de4ccb11b320255ee95e791022555971c466af4527"}, {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win32.whl", hash = "sha256:ada3f400d9923a190ea8b59c8f60680c4ef8a4b0dfae134d2f2ff68429adfab5"}, @@ -3293,8 +3335,8 @@ rx = [ {file = "Rx-1.6.1.tar.gz", hash = "sha256:13a1d8d9e252625c173dc795471e614eadfe1cf40ffc684e08b8fff0d9748c23"}, ] scp = [ - {file = "scp-0.14.1-py2.py3-none-any.whl", hash = "sha256:e4e0b9b41b73ebcc4e988e8f43039dc3715e88f3ee7b3e2d21521975bcfc82ee"}, - {file = "scp-0.14.1.tar.gz", hash = "sha256:b776bd6ce8c8385aa9a025b64a9815b5d798f12d4ef0d712d569503f62aece8b"}, + {file = "scp-0.14.2-py2.py3-none-any.whl", hash = "sha256:ec00097adadf85f147d085561b2f9cc209293f59541b8e1b65649bf9e4120b4a"}, + {file = "scp-0.14.2.tar.gz", hash = "sha256:713f117413bbd616a1a7da8f07db9adcd835ce73d8585fb469ea5b5785f92e4d"}, ] singledispatch = [ {file = "singledispatch-3.7.0-py2.py3-none-any.whl", hash = "sha256:bc77afa97c8a22596d6d4fc20f1b7bdd2b86edc2a65a4262bdd7cc3cc19aa989"}, @@ -3354,33 +3396,33 @@ toml = [ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] tomli = [ - {file = "tomli-1.2.2-py3-none-any.whl", hash = "sha256:f04066f68f5554911363063a30b108d2b5a5b1a010aa8b6132af78489fe3aade"}, - {file = "tomli-1.2.2.tar.gz", hash = "sha256:c6ce0015eb38820eaf32b5db832dbc26deb3dd427bd5f6556cf0acac2c214fee"}, + {file = "tomli-1.2.3-py3-none-any.whl", hash = "sha256:e3069e4be3ead9668e21cb9b074cd948f7b3113fd9c8bba083f48247aab8b11c"}, + 
{file = "tomli-1.2.3.tar.gz", hash = "sha256:05b6166bff487dc068d322585c7ea4ef78deed501cc124060e0f238e89a9231f"}, ] transitions = [ {file = "transitions-0.8.10-py2.py3-none-any.whl", hash = "sha256:2e9de3c453fafe3a8afef2c2bdc47e30a7a73e32d88f793ad4c1b6246f0cc364"}, {file = "transitions-0.8.10.tar.gz", hash = "sha256:b0385975a842e885c1a55c719d2f90164471665794d39d51f9eb3f11e1d9c8ac"}, ] typed-ast = [ - {file = "typed_ast-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7b310a207ee9fde3f46ba327989e6cba4195bc0c8c70a158456e7b10233e6bed"}, - {file = "typed_ast-1.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52ca2b2b524d770bed7a393371a38e91943f9160a190141e0df911586066ecda"}, - {file = "typed_ast-1.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:14fed8820114a389a2b7e91624db5f85f3f6682fda09fe0268a59aabd28fe5f5"}, - {file = "typed_ast-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:65c81abbabda7d760df7304d843cc9dbe7ef5d485504ca59a46ae2d1731d2428"}, - {file = "typed_ast-1.5.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:37ba2ab65a0028b1a4f2b61a8fe77f12d242731977d274a03d68ebb751271508"}, - {file = "typed_ast-1.5.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:49af5b8f6f03ed1eb89ee06c1d7c2e7c8e743d720c3746a5857609a1abc94c94"}, - {file = "typed_ast-1.5.0-cp36-cp36m-win_amd64.whl", hash = "sha256:e4374a76e61399a173137e7984a1d7e356038cf844f24fd8aea46c8029a2f712"}, - {file = "typed_ast-1.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ea517c2bb11c5e4ba7a83a91482a2837041181d57d3ed0749a6c382a2b6b7086"}, - {file = "typed_ast-1.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:51040bf45aacefa44fa67fb9ebcd1f2bec73182b99a532c2394eea7dabd18e24"}, - {file = "typed_ast-1.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:806e0c7346b9b4af8c62d9a29053f484599921a4448c37fbbcbbf15c25138570"}, - {file = "typed_ast-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a67fd5914603e2165e075f1b12f5a8356bfb9557e8bfb74511108cfbab0f51ed"}, - {file = "typed_ast-1.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:224afecb8b39739f5c9562794a7c98325cb9d972712e1a98b6989a4720219541"}, - {file = "typed_ast-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:155b74b078be842d2eb630dd30a280025eca0a5383c7d45853c27afee65f278f"}, - {file = "typed_ast-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:361b9e5d27bd8e3ccb6ea6ad6c4f3c0be322a1a0f8177db6d56264fa0ae40410"}, - {file = "typed_ast-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:618912cbc7e17b4aeba86ffe071698c6e2d292acbd6d1d5ec1ee724b8c4ae450"}, - {file = "typed_ast-1.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7e6731044f748340ef68dcadb5172a4b1f40847a2983fe3983b2a66445fbc8e6"}, - {file = "typed_ast-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e8a9b9c87801cecaad3b4c2b8876387115d1a14caa602c1618cedbb0cb2a14e6"}, - {file = "typed_ast-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:ec184dfb5d3d11e82841dbb973e7092b75f306b625fad7b2e665b64c5d60ab3f"}, - {file = "typed_ast-1.5.0.tar.gz", hash = "sha256:ff4ad88271aa7a55f19b6a161ed44e088c393846d954729549e3cde8257747bb"}, + {file = "typed_ast-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:5d8314c92414ce7481eee7ad42b353943679cf6f30237b5ecbf7d835519e1212"}, + {file = "typed_ast-1.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b53ae5de5500529c76225d18eeb060efbcec90ad5e030713fe8dab0fb4531631"}, + {file = "typed_ast-1.5.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:24058827d8f5d633f97223f5148a7d22628099a3d2efe06654ce872f46f07cdb"}, + {file = "typed_ast-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:a6d495c1ef572519a7bac9534dbf6d94c40e5b6a608ef41136133377bba4aa08"}, + {file = "typed_ast-1.5.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:de4ecae89c7d8b56169473e08f6bfd2df7f95015591f43126e4ea7865928677e"}, + {file = "typed_ast-1.5.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:256115a5bc7ea9e665c6314ed6671ee2c08ca380f9d5f130bd4d2c1f5848d695"}, + {file = "typed_ast-1.5.1-cp36-cp36m-win_amd64.whl", hash = "sha256:7c42707ab981b6cf4b73490c16e9d17fcd5227039720ca14abe415d39a173a30"}, + {file = "typed_ast-1.5.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:71dcda943a471d826ea930dd449ac7e76db7be778fcd722deb63642bab32ea3f"}, + {file = "typed_ast-1.5.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4f30a2bcd8e68adbb791ce1567fdb897357506f7ea6716f6bbdd3053ac4d9471"}, + {file = "typed_ast-1.5.1-cp37-cp37m-win_amd64.whl", hash = "sha256:ca9e8300d8ba0b66d140820cf463438c8e7b4cdc6fd710c059bfcfb1531d03fb"}, + {file = "typed_ast-1.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9caaf2b440efb39ecbc45e2fabde809cbe56272719131a6318fd9bf08b58e2cb"}, + {file = "typed_ast-1.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c9bcad65d66d594bffab8575f39420fe0ee96f66e23c4d927ebb4e24354ec1af"}, + {file = "typed_ast-1.5.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:591bc04e507595887160ed7aa8d6785867fb86c5793911be79ccede61ae96f4d"}, + {file = "typed_ast-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:a80d84f535642420dd17e16ae25bb46c7f4c16ee231105e7f3eb43976a89670a"}, + {file = "typed_ast-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:38cf5c642fa808300bae1281460d4f9b7617cf864d4e383054a5ef336e344d32"}, + {file = "typed_ast-1.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5b6ab14c56bc9c7e3c30228a0a0b54b915b1579613f6e463ba6f4eb1382e7fd4"}, + {file = "typed_ast-1.5.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a2b8d7007f6280e36fa42652df47087ac7b0a7d7f09f9468f07792ba646aac2d"}, + {file = "typed_ast-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:b6d17f37f6edd879141e64a5db17b67488cfeffeedad8c5cec0392305e9bc775"}, + {file = "typed_ast-1.5.1.tar.gz", hash = "sha256:484137cab8ecf47e137260daa20bafbba5f4e3ec7fda1c1e69ab299b75fa81c5"}, ] typing-extensions = [ {file = "typing_extensions-3.10.0.2-py2-none-any.whl", hash = "sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7"}, @@ -3404,8 +3446,8 @@ wcwidth = [ {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, ] win32-setctime = [ - {file = "win32_setctime-1.0.3-py3-none-any.whl", hash = "sha256:dc925662de0a6eb987f0b01f599c01a8236cb8c62831c22d9cada09ad958243e"}, - {file = "win32_setctime-1.0.3.tar.gz", hash = "sha256:4e88556c32fdf47f64165a2180ba4552f8bb32c1103a2fafd05723a0bd42bd4b"}, + {file = 
"win32_setctime-1.0.4-py3-none-any.whl", hash = "sha256:7964234073ad9bc7a689ef2ebe6ce931976b644fe73fd50cf7729c996b7d8385"}, + {file = "win32_setctime-1.0.4.tar.gz", hash = "sha256:2b72b798fdc1d909fb3cc0d25e0be52a42f4848857e3588dd3947c6a18b42609"}, ] wrapt = [ {file = "wrapt-1.13.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:e05e60ff3b2b0342153be4d1b597bbcfd8330890056b9619f4ad6b8d5c96a81a"}, diff --git a/pyproject.toml b/pyproject.toml index f93585f4..4b8cec3e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,7 +46,7 @@ pydocstyle = "*" pylint = "*" pylint-django = "*" yamllint = "*" -Markdown = "3.3.4" # temp pin for https://github.com/Python-Markdown/markdown/issues/1195 +Markdown = {version="*", python="^3.6.2"} [tool.black] From 0c3e902582d1ce68094d731aa614f35fc28e701f Mon Sep 17 00:00:00 2001 From: mitchell-foxworth <71849794+mitchell-foxworth@users.noreply.github.com> Date: Mon, 20 Dec 2021 15:46:49 -0500 Subject: [PATCH 15/36] Multiple repo support (#167) * new branch for supporting multiple backup/intended repos Co-authored-by: Jeff Kala <48843785+jeffkala@users.noreply.github.com> Co-authored-by: Mitch Zubarev-Foxworth Co-authored-by: DrX Co-authored-by: h4ndzdatm0ld Co-authored-by: Hugo Tinoco <43675476+h4ndzdatm0ld@users.noreply.github.com> Co-authored-by: Jeff Kala <48843785+jeffkala@users.noreply.github.com> Co-authored-by: Jeremy White Co-authored-by: itdependsnetworks --- CHANGELOG.md | 10 ++ development/nautobot_config.py | 22 +++ docs/installation.md | 2 +- docs/navigating-backup.md | 83 ++++++++- docs/navigating-compliance.md | 6 +- docs/navigating-golden.md | 16 +- docs/navigating-intended.md | 27 ++- docs/navigating-sot-agg.md | 24 +-- docs/quick-start.md | 10 +- nautobot_golden_config/forms.py | 29 ++++ nautobot_golden_config/jobs.py | 48 ++--- .../0006_multi_repo_support_temp_field.py | 49 ++++++ .../0007_multi_repo_support_convert_many.py | 70 ++++++++ .../0008_multi_repo_support_final.py | 25 +++ nautobot_golden_config/models.py | 52 +++++- .../nornir_plays/config_backup.py | 14 +- .../nornir_plays/config_compliance.py | 22 ++- .../nornir_plays/config_intended.py | 28 ++- .../goldenconfigsetting.html | 137 ++++++++------- .../nautobot_golden_config/manytomany.html | 6 + nautobot_golden_config/tests/conftest.py | 132 +++++++++++++- .../tests/forms/__init__.py | 1 + .../forms/test_golden_config_settings.py | 77 ++++++++ nautobot_golden_config/tests/test_graphql.py | 15 +- nautobot_golden_config/tests/test_models.py | 164 +++++++++++++++++- .../tests/test_utilities/test_helpers.py | 83 ++++++++- nautobot_golden_config/utilities/helper.py | 67 ++++++- nautobot_golden_config/views.py | 2 +- pyproject.toml | 4 +- tasks.py | 14 +- 30 files changed, 1040 insertions(+), 199 deletions(-) create mode 100644 nautobot_golden_config/migrations/0006_multi_repo_support_temp_field.py create mode 100644 nautobot_golden_config/migrations/0007_multi_repo_support_convert_many.py create mode 100644 nautobot_golden_config/migrations/0008_multi_repo_support_final.py create mode 100644 nautobot_golden_config/templates/nautobot_golden_config/manytomany.html create mode 100644 nautobot_golden_config/tests/forms/__init__.py create mode 100644 nautobot_golden_config/tests/forms/test_golden_config_settings.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 02eb7604..f3a2c824 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,15 @@ # Changelog +## v1.0.0 - 2021-XX-XX + +### Added + +- GoldenConfigSettings enforces a `template path` be provided if more than 1 backup or intended 
repository is configured. +- Updated backup job to execute against multiple repos if available based on pattern matching. +- Updated intended job to execute against multiple repos if available based on pattern matching. +- Updated compliance job to execute from multiple repos if available based on pattern matching. +- Added utility function to determine the local filesystem path which stores the backup and intended repository files for a given device. + ## v0.9.10 - 2021-11 ### Announcements diff --git a/development/nautobot_config.py b/development/nautobot_config.py index ebb241b6..458c0636 100644 --- a/development/nautobot_config.py +++ b/development/nautobot_config.py @@ -141,6 +141,17 @@ }, }, }, + # dispatcher_mapping may be necessary if you get an error `Cannot import "". Is the library installed?` + # when you run a backup job, and is the name of the platform applied to the device. + # to the Nornir driver names ("arista_eos", "cisco_ios", etc.). + # "dispatcher_mapping": { + # "eos": "nornir_nautobot.plugins.tasks.dispatcher.arista_eos.NautobotNornirDriver", + # "arbitrary_platform_name": "nornir_nautobot.plugins.tasks.dispatcher.arista_eos.NautobotNornirDriver", + # "ios": "nornir_nautobot.plugins.tasks.dispatcher.cisco_ios.NautobotNornirDriver", + # "iosxe": "nornir_nautobot.plugins.tasks.dispatcher.cisco_ios.NautobotNornirDriver", + # "junos": "nornir_nautobot.plugins.tasks.dispatcher.juniper_junos.NautobotNornirDriver", + # "nxos": "nornir_nautobot.plugins.tasks.dispatcher.cisco_nxos.NautobotNornirDriver", + # }, }, "nautobot_golden_config": { "per_feature_bar_width": float(os.environ.get("PER_FEATURE_BAR_WIDTH", 0.15)), @@ -151,6 +162,17 @@ "enable_intended": is_truthy(os.environ.get("ENABLE_INTENDED", True)), "enable_sotagg": is_truthy(os.environ.get("ENABLE_SOTAGG", True)), "sot_agg_transposer": os.environ.get("SOT_AGG_TRANSPOSER"), + # The platform_slug_map maps an arbitrary platform slug to its corresponding parser. + # Use this if the platform slug names in your Nautobot instance don't correspond exactly + # to the Nornir driver names ("arista_eos", "cisco_ios", etc.). + # Each key should == the slug of the Nautobot platform object. + # "platform_slug_map": { + # "eos": "arista_eos", + # "ios": "cisco_ios", + # "iosxe": "cisco_ios", + # "junos": "juniper_junos", + # "nxos": "cisco_nxos", + # }, # "get_custom_compliance": "my.custom_compliance.func", }, } diff --git a/docs/installation.md b/docs/installation.md index 7b27860c..74b9ddf8 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -60,6 +60,6 @@ The plugin behavior can be controlled with the following list of settings. | per_feature_width | 13 | 13 | The width in inches that the overview table can be. | | per_feature_height | 4 | 4 | The height in inches that the overview table can be. | -> Note: Over time the intention is to make the compliance report more dynamic, but for now allow users to configure the `per_*` configs in a way that fits best for them. +> Note: Over time the compliance report will become more dynamic, but for now allow users to configure the `per_*` configs in a way that fits best for them. > Note: Review [`nautobot_plugin_nornir`](https://pypi.org/project/nautobot-plugin-nornir/) for Nornir and dispatcher configuration options. 
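
For readers wiring up the commented `platform_slug_map` / `dispatcher_mapping` examples above, the sketch below illustrates the kind of lookup those settings drive. It is illustrative only and not the plugin's internal code; the map contents are hypothetical and should mirror your own Nautobot platform slugs.

```python
# Sketch only: mimic how an arbitrary platform slug can be mapped to a Nornir/NAPALM
# driver name. The plugin performs its own lookup internally; this is just a way to
# reason about (and unit test) the mapping you intend to configure.
PLATFORM_SLUG_MAP = {  # hypothetical values; use your own platform slugs
    "eos": "arista_eos",
    "ios": "cisco_ios",
    "iosxe": "cisco_ios",
    "junos": "juniper_junos",
    "nxos": "cisco_nxos",
}


def resolve_driver(platform_slug: str) -> str:
    """Return the driver name for a platform slug, falling back to the slug itself."""
    return PLATFORM_SLUG_MAP.get(platform_slug, platform_slug)


assert resolve_driver("iosxe") == "cisco_ios"
assert resolve_driver("arista_eos") == "arista_eos"  # already a valid driver name
```
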
diff --git a/docs/navigating-backup.md b/docs/navigating-backup.md index 6b8b5627..2571bbce 100644 --- a/docs/navigating-backup.md +++ b/docs/navigating-backup.md @@ -1,13 +1,14 @@ # Configuration Backup -The backup configuration process relies on the ability for the nautobot worker to connect via Nornir to the device, run the `show run` or equivalent command +The backup configuration process requires the Nautobot worker to connect via Nornir to the device, and run the `show run` or equivalent command, and save the configuration. The high-level process to run backups is: -* Download the latest Git repository, based on `backup config` type Git repo within Nautobot. +* Download the latest version of each of the Git repositories configured with the `backup configs` capability within Nautobot. * Run a Nornir play to obtain the cli configurations. * Optionally perform some lightweight processing of the backup. -* Store the backup configurations locally. -* Push configurations to the remote Git repository. +* Store each device's backup configuration file on the local filesystem. +* Commit all files added or changed in each repository. +* Push configuration files to the remote Git repositories. ## Configuration Backup Settings @@ -18,11 +19,71 @@ uses cases, the following settings are available and further documented below. * Config Removals - provides the ability to remove a line based on a regex match. * Config Replacements - provides the ability to swap out parts of a line based on a regex match. -Backup configurations rely on a Git Repo, and the plugin registers an additional repository for Git source this access. Within the Nautobot Git -repositories, there will be a `backup config` option, which there must be one and only one configured for the process to work. For further details, refer -[to](./navigating-golden.md#git-settings). +### Backup Repositories -The `backup_path_template` provides the ability to dynamically state per device where the configurations should end up in the file structure. Every device is a Django ORM object, tied to the model instance of a `Device` model, and that is represented as `obj`. That means that any valid Device model method is available. This is then compiled via Jinja. This may seem complicated, but the equivalent of `obj` by example would be: +In the `Backup Repositories` field of the UI, configure all of the repositories which you intend to use for backed-up device configurations as part of Golden Config. + +Backup repositories must first be configured under **Extensibility -> Git Repositories**. When you configure a repository, look for the `Provides` field in the UI. To serve as a configuration backup store, the repository must be configured with the `backup configs` capability under the `Provides` field. For further details, refer to [Navigating Nautobot Git Settings](./navigating-golden.md#git-settings). + +### Backup Repository Matching Rule + +.. Note:: + Only use a Backup Repository Matching Rule if you have **more than one** backup repository. It is not needed if you only have one, and will cause backup failures for any devices which do not match the rule. The setting is mandatory if you have more than one repository. + +The `backup_match_rule` setting allows you to match a given `Device` Django ORM object to a backup Git repository. This field should contain a Jinja2-formatted template. The plugin populates the variables in the Jinja2 template via the GraphQL query configured on the plugin. 
+ +Say that in your environment you have three regions in which your devices reside: North America, Asia Pacific, and Africa. You have populated these values as `Region` objects in Nautobot, and assigned a `Region` value to each of your devices. You want your backup solution to scale well, so you have a dedicated backup Git repository for each region. Every Nautobot object has a `slug` (URL compatible) name in addition to its human-friendly name; our regions' slugs are `north-america`, `asia-pacific`, and `africa`. To configure the plugin to match devices to the desired Git repository, you must first configure the GraphQL query; a _VERY_ simple one might look like this: +``` +query ($device_id: ID!) { + device(id: $device_id) { + config_context + hostname: name + platform { + manufacturer { + name + } + name + napalm_driver + slug + } + primary_ip4 { + address + interface { + name + } + id + } + site { + name + region { + name + slug + } + slug + } + } +} +``` + +The query will look at the `Device` ORM object, and return the values from the query as keys under the top-level `obj` key. The `obj` key represents the Device object. With this GraphQL query, we can make a Jinja2 template to translate the returned values into a string. For example, say that you have a device which is in your Sydney, AU office, which is in the `asia-pacific` region in Nautobot. If you made a Jinja2 template based on that, which looked like this: + +``` +{{obj.site.region.slug}} +``` +Then the template would be rendered to the string: +``` +asia-pacific +``` + +When you create backup repositories, pay attention to your naming scheme. You should name each repository in a way that matches the value of whatever parameter from the Device object which you wish to use to sort devices into repositories. So, for our Sydney device above, it would work to name your Asia Pacific repository something "Asia Pacific Device Backups". This would give it a `slug` value of `asia-pacific-device-backups`, and you could use this in a backup repository matching rule with a template like this: + +``` +{{obj.site.region.slug}}-device-backups +``` + +### Backup Path Template + +The `backup_path_template` setting gives you a way to dynamically place each device's configuration file in the repository file structure. This setting uses the GraphQL query configured for the plugin. It works in a similar way to the Backup Repository Matching Rule above. Since the setting uses a GraphQL query, any valid Device model method is available. The plugin renders the values from the query, using Jinja2, to the relative path and file name in which to store a given device's configuration inside its backup repository. This may seem complicated, but the equivalent of `obj` by example would be: ```python obj = Device.objects.get(name="nyc-rt01") @@ -33,12 +94,16 @@ An example would be: backup_path_template = "{{obj.site.slug}}/{{obj.name}}.cfg" ``` +With a Sydney, AU device `SYD001AURTR32`, in the site named `Sydney001` and the GraphQL query and `backup_path_template` configured above, our backed-up config would be placed in the repo in `/sydney001/SYD001AURTR32.cfg`. The site value `sydney001` here is lower case because our template refers to the `slug` value, which by default will be lower case. + The backup process will automatically create folders as required based on the path definition. The `backup_path_template` can be set in the UI. For navigation details [see](./navigating-golden.md#application-settings). 
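
If you want to sanity-check a matching rule or path template before saving it in the UI, you can render it with plain Jinja2 against a dictionary shaped like your GraphQL result. This is only a sketch under the assumption that your query returns the site and region fields shown above; the device data below is hypothetical, and the dictionary merely stands in for the device object the plugin passes as `obj`.

```python
from jinja2 import Environment

# Hypothetical subset of the data returned by the GraphQL query for one device.
device_data = {
    "name": "SYD001AURTR32",
    "site": {"slug": "sydney001", "region": {"slug": "asia-pacific"}},
}

env = Environment()

backup_match_rule = "{{obj.site.region.slug}}-device-backups"
backup_path_template = "{{obj.site.slug}}/{{obj.name}}.cfg"

# The templates are rendered with the device data available under the `obj` key.
print(env.from_string(backup_match_rule).render(obj=device_data))
# -> asia-pacific-device-backups
print(env.from_string(backup_path_template).render(obj=device_data))
# -> sydney001/SYD001AURTR32.cfg
```

Rendering the templates locally like this is only a convenience for catching typos before a job run fails; the plugin itself performs the rendering during its jobs.
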
+### Device Login Credentials + The credentials/secrets management is further described within the [nautbot-plugin-nornir](https://github.com/nautobot/nautobot-plugin-nornir) -repo. For the simplist use case you can set environment variables for `NAPALM_USERNAME`, `NAPALM_PASSWORD`, and `DEVICE_SECRET`. For more +repository. For the simplist use case you can set environment variables for `NAPALM_USERNAME`, `NAPALM_PASSWORD`, and `DEVICE_SECRET`. For more complicated use cases, please refer to the plugin documentation linked above. ## Starting a Backup Job diff --git a/docs/navigating-compliance.md b/docs/navigating-compliance.md index 54ffbff1..041eeecf 100644 --- a/docs/navigating-compliance.md +++ b/docs/navigating-compliance.md @@ -22,10 +22,10 @@ The tool makes no assumptions to determine what an engineer may want to do, but ## Compliance Configuration Settings -In order to generate the intended configurations two repositories are needed. +In order to generate the intended configurations, a minimum of two repositories are required. -1. A repo to save [intended configurations](./navigating-golden.md#git-settings) to once generated. -2. A repo that stores [Backups](./navigating-golden.md#git-settings) used to as the actual configurations. +1. At least one repository in which to save [intended configurations](./navigating-golden.md#git-settings) once generated. +2. At least one repository in which to store [Backups](./navigating-golden.md#git-settings) of devices' running configurations. 3. The [intended_path_template](./navigating-golden.md#application-settings) configuration parameter. 4. The [backup_path_template](./navigating-golden.md#application-settings) configuration parameter. diff --git a/docs/navigating-golden.md b/docs/navigating-golden.md index 19fdcd61..3a4258d1 100644 --- a/docs/navigating-golden.md +++ b/docs/navigating-golden.md @@ -44,13 +44,15 @@ To configure or update the settings click the pencil icon to edit. |Setting|Explanation| |:--|:--| -|Backup Repository | This is the Git Repository where your backup configurations will be found. | -|Backup Path|This represents the Jinja path where the backup files will be found. The variable `obj` is available as the device instance object of a given device, as is the case for all Jinja templates. e.g. `{{obj.site.slug}}/{{obj.name}}.cfg`| -|Intended Repository | This is the Git Repository where your backup configurations will be found. | -|Intended Path|The Jinja path representation of where the generated file will be places. e.g. `{{obj.site.slug}}/{{obj.name}}.cfg`| -|Jinja Repository | This is the Git Repository where your jinja templates will be found. | -|Jinja Path|The Jinja path representation of where the Jinja temaplte can be found. e.g. `{{obj.platform.slug}}.j2`| -|Scope| This is where the scope of devices to be considered within Golden Config is defined. | +|Backup Repositories |One or more Git repositories where your backup configurations will be found. | +|Backup Repository Matching Rule |A Jinja template to match a device to a backup repositories `slug` value. Required if you configure more than one backup repository. E.g. `my-backup-repo-{{obj.site.region.slug}}` | +|Backup Path|A Jinja template which defines the path and name of backup files within the backup repository. The variable `obj` is available as the device instance object of a given device, as is the case for all Jinja templates. e.g. 
`{{obj.site.slug}}/{{obj.name}}.cfg`| +|Intended Repositories |One or more Git repository where your intended configuration state files will be found. | +|Intended Repository Matching Rule |A Jinja template to match a device to an intended state repository's `slug` value. Required if you configure more than one intended repository. E.g. `best-of-intentions-repo-{{obj.site.region.slug}}` | +|Intended Path|A Jinja template which defines the path and name of intended configuration state files within the intended state repository. e.g. `{{obj.site.slug}}/{{obj.name}}.intended_cfg`| +|Jinja Repository |The Git Repository where your jinja templates will be found. | +|Jinja Path|A Jinja template which defines the path (within the repository) and name of the Jinja template file. e.g. `{{obj.platform.slug}}/{{obj.role.slug}}/main.j2`| +|Scope|The scope of devices on which Golden Config's jobs can operate. | |GraphQL Query|A query that is evaluated and used to render the config. The query must start with `query ($device_id: ID!)`.| > Note: Each of these will be further detailed in their respective sections. diff --git a/docs/navigating-intended.md b/docs/navigating-intended.md index 4bb7d412..bade01a1 100644 --- a/docs/navigating-intended.md +++ b/docs/navigating-intended.md @@ -2,12 +2,13 @@ ## Configuration Generation -The Golden Config plugin provides the ability to generate configurations. The process is a Nornir play that points to a single Jinja template per -device that generates the configurations. Data is provided via the Source of Truth aggregation and is currently a hard requirement to be turned on if -generating configuration via the Golden Config plugin. Whatever data is returned by the Source of Truth Aggregation is available to the Jinja template. +The Golden Config plugin **Intended Configuration** job generates intended state files for each device in the plugin's configured scope. An intended state file contains the output from rendering the device's Source of Truth Aggregation values through the Jinja templates used by the plugin. -As previous stated, there can only be a single template per device. It is often advantageous to break configurations into smaller snippets. A common pattern -to overcome is: +The job itself is a Nornir play which uses a single Jinja template per device. Source of Truth Aggregation data comes from the GraphQL query configured in the Golden Config plugin's settings. An important component of the SoT Aggregation data are the `config_context` values. `config_context` should contain a vendor-neutral, JSON structured representation of a device's configuration values: a list of NTP/AAA/Syslog servers, common VRFs, etc. See [Config Contexts](https://nautobot.readthedocs.io/en/latest/additional-features/config-contexts/#configuration-contexts) for more information. + +The Source of Truth Aggregation feature of the plugin must be enabled for the plugin to generate intended configuration state output. + +There can only be a single Jinja template per device. Device configurations can become daunting to create via a Jinja template, if you try to place all of the logic for a device's configuration inside a single Jinja2 file. These template files can quickly become too complex to maintain. So, it is often advantageous to break configurations into smaller feature-oriented snippets, each contained in their own discrete template file. Operators often keep their main, top-level, template simple and easy to maintain by only placing include statements in it: ```jinja ! 
@@ -28,6 +29,7 @@ or
 !
 {% endfor %}
 ```
+In these examples, `/services.j2`, `/ntp.j2`, etc. could contain the actual Jinja code which renders the configuration for their corresponding features. Alternatively, in more complex environments, these files could themselves contain only include statements in order to create a hierarchy of template files, so as to keep each individual file neat and simple. Think of the main, top-level, template as an entrypoint into a hierarchy of templates. A well-thought-out structure for your templates is necessary to avoid the temptation to place all logic into a small number of templates. Like any code, Jinja2 templates become harder to manage, more buggy, and more fragile as you add complexity, so anything you can do to keep them simple will help your automation efforts.
 
 ## Adding Jinja2 Filters to the Environment.
 
@@ -88,13 +90,22 @@ To start a intended configuration job manually:
 
 ## Intended Configuration Settings
 
-In order to generate the intended configurations two repositories are needed.
+In order to generate the intended configurations, at least two repositories are needed.
 
-1. A repo to save [intended configurations](./navigating-golden.md#git-settings) to once generated.
-2. A repo that stores [backups](./navigating-golden.md#git-settings) used to as the actual configurations.
+1. At least one repository in which to save [intended configurations](./navigating-golden.md#git-settings) once generated.
+2. At least one repository in which to store device [backups](./navigating-golden.md#git-settings); the device's current operating configuration.
 3. The [intended_path_template](./navigating-golden.md#application-settings) configuration parameter.
 4. The [jinja_path_template](./navigating-golden.md#application-settings) configuration parameter.
 
+### Intended Repository Matching Rule
+
+.. Note::
+    Only use an Intended Repository Matching Rule if you have **more than one** intended repository. It is **not needed** if you only have one repository. The operator is expected to ensure that the rule yields a valid repository match for every device (otherwise that device will fail to render a config).
+
+The `intended_match_rule` setting allows you to match a given `Device` Django ORM object to an intended configuration Git repository. This field should contain a Jinja2-formatted template. The plugin populates the variables in the Jinja2 template via the GraphQL query configured on the plugin.
+
+This is exactly the same concept as described in [Backup Repository Matching Rule](./navigating-backup.md#backup-repository-matching-rule), and it is explained in more detail there.
+
 ## Data
 
 The data provided while rendering the configuration of a device is described in the [SoT Aggregation](./navigating-sot-agg.md) overview.
diff --git a/docs/navigating-sot-agg.md b/docs/navigating-sot-agg.md
index b18bb10d..8ef179f4 100644
--- a/docs/navigating-sot-agg.md
+++ b/docs/navigating-sot-agg.md
@@ -1,10 +1,11 @@
 # SoT Aggregation Overview
 
-The Source of Truth Aggregation Overview is driven by a few key components.
+The Source of Truth Aggregation feature uses several key components:
 
-* The ability to have a single GraphQL query to aggregate information.
-* The ability to modify data with a "transposer" function.
-* The usage of config contexts and the Nautobot's native git platform.
+* A single GraphQL query which aggregates device data.
+* A facility to modify data with a "transposer" function.
+* Nautobot's config context feature and policy engine.
+* Nautobot's native git platform.

## GraphQL @@ -14,7 +15,7 @@ your data, and then save that query to the configuration. The application config * The query is a valid GraphQL query. * The query starts with exactly "query ($device_id: ID!)"". This is to help fail fast and help with overall user experience of clear expectations. -It is worth noting that the graphQL query returned is modified to remove the root key of `device`, so instead of all data being within device, such as +Note that the GraphQL query returned is modified to remove the root key of `device`, so instead of all data being within device, such as `{"device": {"site": {"slug": "jcy"}}}`, it is simply `{"site": {"slug": "jcy"}}` as an example. It is helpful to make adjustments to the query, and then view the data from the Plugin's home page and clicking on a given device's `code-json` icon. @@ -35,8 +36,8 @@ def transposer(data): return data ``` -While the example transposer is silly and untested, it provides the structure for which a transposer can be use. The possibilities are obviously endless, -such as reaching out to an external system but operators should use caution not to overload complexity into the transposer. +While the example transposer is silly and untested, it provides the structure for which a transposer can be used. The possibilities are obviously endless, +such as reaching out to an external system, but operators should use caution not to overload complexity into the transposer. The configuration required in the Plugin configuration is as described below. @@ -47,17 +48,16 @@ The path described must be within the Python path of your worker. It is up to th ## Config Contexts -Outside of the scope of this document, but it is worth mentioning the power that configuration context's with integration to Git can provide in this -solution. This is important since config contexts can be used for arbitrary JSON serializable data structures. That is helpful to model configuration -that is not within Nautobot Core or within a Nautobot Plugin. A common use case is to model "global configuration" like data, such as NTP, DNS, SNMP, etc. +While outside the scope of this document, it is worth mentioning the power that the `config_context` feature, along with integration to Git, can provide in this +solution. Config contexts can be used for arbitrary JSON serializable data structures. That is helpful to model configuration +that would not normally be available within Nautobot Core Django ORM models or within a Nautobot plugin's custom models. A common use case is to model "global configuration" like data, such as NTP, DNS, SNMP, etc. For more information, please refer to the Nautobot Core documentation on [Config Contexts](https://nautobot.readthedocs.io/en/latest/additional-features/config-contexts/#configuration-contexts) and leveraging [Git Data Sources](https://nautobot.readthedocs.io/en/stable/user-guides/git-data-source/#using-git-data-sources). ## Performance -The GraphQL and transposer functionality could seriously impact the performance of the server. There are no restrictions imposed as it is up to the -operator to weigh the pros and cons of the solution. +The GraphQL and transposer functions have potential to seriously impact the performance of the Nautobot application. Operator should weigh the pros and cons of the solution before committing to the use of these functions. 
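
As a concrete illustration (all values made up), a config context carrying such "global configuration" data might look like the structure below once it reaches the aggregated payload; the key names are hypothetical and should be whatever your templates expect.

```python
# Hypothetical config context payload as it could appear in the aggregated SoT data
# (keys and values are examples only; structure it however suits your templates).
config_context = {
    "ntp_servers": ["10.0.0.1", "10.0.0.2"],
    "dns_servers": ["10.0.0.53"],
    "snmp": {"community": "example-ro", "location": "jcy"},
}

# A Jinja2 template could then render it, for example:
#   {% for server in config_context["ntp_servers"] %}
#   ntp server {{ server }}
#   {% endfor %}
```
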
## Sample Query diff --git a/docs/quick-start.md b/docs/quick-start.md index a94bf6bb..baa2a4d2 100644 --- a/docs/quick-start.md +++ b/docs/quick-start.md @@ -10,11 +10,11 @@ Follow the steps below to get up and running for the configuration backup elemen 1. Enable the feature in the `PLUGIN_SETTINGS`. The configuration should have `"enable_backup": True` set in the `PLUGINS_CONFIG` dictionary for `nautobot_golden_config`. -2. Add the git repository that will be used to house the backup configurations. +2. Add any git repositories that will be used to house the backup configurations. 1. In the UI `Extensibility -> Git Repositories`. Click Add. 2. Populate the Git Repository data for the backup. [Git Settings](./navigating-golden.md#git-settings) - 3. Make sure to select the **provides** called `backup configs`. + 3. Make sure to select the **Provides** called `backup configs`. 4. Click Create. 3. Next, make sure to update the Plugins **Settings** with the backup details. @@ -47,18 +47,18 @@ Follow the steps below to get up and running for the intended configuration elem 1. Enable the feature in the `PLUGIN_SETTINGS`. The configuration should have `"enable_intended": True` set in the `PLUGINS_CONFIG` dictionary for `nautobot_golden_config`. -2. Add the git repository that will be used to house the intended configurations. +2. Add any git repositories that will be used to house the intended configurations. 1. In the UI `Extensibility -> Git Repositories`. Click Add. 2. Populate the Git Repository data for the intended. [Git Settings](./navigating-golden.md#git-settings) - 3. Make sure to select the **provides** called `intended configs`. + 3. Make sure to select the **Provides** called `intended configs`. 4. Click Create. 3. Add the git repository that will be used to house the Jinja2 templates. 1. In the UI `Extensibility -> Git Repositories`. Click Add. 2. Populate the Git Repository data for the jinja2 templates. [Git Settings](./navigating-golden.md#git-settings) - 3. Make sure to select the **provides** called `jinja templates`. + 3. Make sure to select the **Provides** called `jinja templates`. 4. Click Create. 4. Next, make sure to update the Plugins **Settings** with the intended and jinja2 template details. 
diff --git a/nautobot_golden_config/forms.py b/nautobot_golden_config/forms.py index b9725291..90b4feb5 100644 --- a/nautobot_golden_config/forms.py +++ b/nautobot_golden_config/forms.py @@ -6,9 +6,13 @@ import nautobot.utilities.forms as utilities_forms from nautobot.dcim.models import Device, Platform, Region, Site, DeviceRole, DeviceType, Manufacturer, Rack, RackGroup from nautobot.extras.models import Status +from nautobot.extras.models import GitRepository from nautobot.tenancy.models import Tenant, TenantGroup +from nautobot.utilities.forms import StaticSelect2Multiple, SlugField + from nautobot_golden_config import models +from nautobot_golden_config.utilities.helper import clean_config_settings # ConfigCompliance @@ -166,6 +170,8 @@ class ComplianceFeatureForm( ): """Filter Form for ComplianceFeature instances.""" + slug = SlugField() + class Meta: """Boilerplate form Meta data for compliance feature.""" @@ -330,14 +336,25 @@ class GoldenConfigSettingFeatureForm( ): """Filter Form for GoldenConfigSettingFeatureForm instances.""" + backup_repository = forms.ModelMultipleChoiceField( + queryset=GitRepository.objects.filter(provided_contents__contains="nautobot_golden_config.backupconfigs"), + widget=StaticSelect2Multiple(), + ) + intended_repository = forms.ModelMultipleChoiceField( + queryset=GitRepository.objects.filter(provided_contents__contains="nautobot_golden_config.intendedconfigs"), + widget=StaticSelect2Multiple(), + ) + class Meta: """Filter Form Meta Data for GoldenConfigSettingFeatureForm instances.""" model = models.GoldenConfigSetting fields = ( "backup_repository", + "backup_match_rule", "backup_path_template", "intended_repository", + "intended_match_rule", "intended_path_template", "jinja_repository", "jinja_path_template", @@ -345,3 +362,15 @@ class Meta: "scope", "sot_agg_query", ) + + def clean(self): + """Clean.""" + super().clean() + # This custom clean function validates logic of when or when not to + # have a template matching path in GlobalConfigSettings for repos. 
+ for repo_type in ["intended", "backup"]: + clean_config_settings( + repo_type=repo_type, + repo_count=self.cleaned_data.get(f"{repo_type}_repository").count(), + match_rule=self.cleaned_data.get(f"{repo_type}_match_rule"), + ) diff --git a/nautobot_golden_config/jobs.py b/nautobot_golden_config/jobs.py index a6ed625b..66a3fc16 100644 --- a/nautobot_golden_config/jobs.py +++ b/nautobot_golden_config/jobs.py @@ -22,17 +22,17 @@ name = "Golden Configuration" # pylint: disable=invalid-name -def git_wrapper(obj, orm_obj, git_type): +def git_wrapper(obj, repository_record, git_type): """Small wrapper to pull latest branch, and return a GitRepo plugin specific object.""" - if not orm_obj: + if not repository_record: obj.log_failure( obj, f"FATAL ERROR: There is not a valid Git repositories for Git type {git_type}, please see pre-requisite instructions to configure an appropriate Git repositories.", ) raise # pylint: disable=misplaced-bare-raise - ensure_git_repository(orm_obj, obj.job_result) - git_repo = GitRepo(orm_obj) + ensure_git_repository(repository_record, obj.job_result) + git_repo = GitRepo(repository_record) return git_repo @@ -96,11 +96,7 @@ def run(self, data, commit): # pylint: disable=too-many-branches """Run config compliance report script.""" # pylint: disable-msg=too-many-locals # pylint: disable=unused-argument - - backup_repo = git_wrapper(self, GoldenConfigSetting.objects.first().backup_repository, "backup") - intended_repo = git_wrapper(self, GoldenConfigSetting.objects.first().intended_repository, "intended") - - config_compliance(self, data, backup_repo.path, intended_repo.path) + config_compliance(self, data) class IntendedJob(Job, FormEntry): @@ -121,7 +117,7 @@ class IntendedJob(Job, FormEntry): debug = FormEntry.debug class Meta: - """Meta object boilerplate for intedned.""" + """Meta object boilerplate for intended.""" name = "Generate Intended Configurations" description = "Generate the configuration for your intended state." @@ -130,17 +126,23 @@ class Meta: def run(self, data, commit): """Run config generation script.""" now = datetime.now() + LOGGER.debug("Pull Jinja template repo.") jinja_repo = git_wrapper(self, GoldenConfigSetting.objects.first().jinja_repository, "jinja") + LOGGER.debug("Pull Intended config repo.") - intended_repo = git_wrapper(self, GoldenConfigSetting.objects.first().intended_repository, "intended") + golden_config = GoldenConfigSetting.objects.first() + # Instantiate a GitRepo object for each GitRepository in GoldenConfigSettings. + intended_repos = [git_wrapper(self, repo, "intended") for repo in golden_config.intended_repository.all()] LOGGER.debug("Run config intended nornir play.") - config_intended(self, data, jinja_repo.path, intended_repo.path) + config_intended(self, data, jinja_repo.path) - LOGGER.debug("Push new intended configs to repo.") - intended_repo.commit_with_added(f"INTENDED CONFIG CREATION JOB - {now}") - intended_repo.push() + # Commit / Push each repo after job is completed. 
+ for intended_repo in intended_repos: + LOGGER.debug("Push new intended configs to repo %s.", intended_repo.url) + intended_repo.commit_with_added(f"INTENDED CONFIG CREATION JOB - {now}") + intended_repo.push() class BackupJob(Job, FormEntry): @@ -171,14 +173,20 @@ def run(self, data, commit): """Run config backup process.""" now = datetime.now() LOGGER.debug("Pull Backup config repo.") - backup_repo = git_wrapper(self, GoldenConfigSetting.objects.first().backup_repository, "backup") + golden_settings = GoldenConfigSetting.objects.first() + + # Instantiate a GitRepo object for each GitRepository in GoldenConfigSettings. + backup_repos = [git_wrapper(self, repo, "backup") for repo in golden_settings.backup_repository.all()] + LOGGER.debug("Starting backup jobs to the following repos: %s", backup_repos) LOGGER.debug("Run nornir play.") - config_backup(self, data, backup_repo.path) + config_backup(self, data) - LOGGER.debug("Pull Backup config repo.") - backup_repo.commit_with_added(f"BACKUP JOB {now}") - backup_repo.push() + # Commit / Push each repo after job is completed. + for backup_repo in backup_repos: + LOGGER.debug("Pushing Backup config repo %s.", backup_repo.url) + backup_repo.commit_with_added(f"BACKUP JOB {now}") + backup_repo.push() class AllGoldenConfig(Job): diff --git a/nautobot_golden_config/migrations/0006_multi_repo_support_temp_field.py b/nautobot_golden_config/migrations/0006_multi_repo_support_temp_field.py new file mode 100644 index 00000000..5f8ab7a9 --- /dev/null +++ b/nautobot_golden_config/migrations/0006_multi_repo_support_temp_field.py @@ -0,0 +1,49 @@ +# Generated by Django 3.1.13 on 2021-12-07 06:37 + +from django.db import migrations, models + + +def convert_many_repos(apps, schema_editor): + """ + Add the current `backup_repository` and `intended_repository` objects + to the `many_to_many` additional intermediary attritbute to retain data.` + """ + GoldenConfigSetting = apps.get_model("nautobot_golden_config", "GoldenConfigSetting") + + settings_obj = GoldenConfigSetting.objects.first() + if settings_obj.backup_repository: + settings_obj.backup_repositories.add(settings_obj.backup_repository) + if settings_obj.intended_repository: + settings_obj.intended_repositories.add(settings_obj.intended_repository) + + +class Migration(migrations.Migration): + + dependencies = [ + ("extras", "0013_default_fallback_value_computedfield"), + ("nautobot_golden_config", "0005_json_compliance_rule"), + ] + + operations = [ + migrations.AddField( + model_name="goldenconfigsetting", + name="backup_repositories", + field=models.ManyToManyField( + blank=True, + limit_choices_to={"provided_contents__contains": "nautobot_golden_config.backupconfigs"}, + related_name="backup_repositories", + to="extras.GitRepository", + ), + ), + migrations.AddField( + model_name="goldenconfigsetting", + name="intended_repositories", + field=models.ManyToManyField( + blank=True, + limit_choices_to={"provided_contents__contains": "nautobot_golden_config.intendedconfigs"}, + related_name="intended_repositories", + to="extras.GitRepository", + ), + ), + migrations.RunPython(convert_many_repos), + ] diff --git a/nautobot_golden_config/migrations/0007_multi_repo_support_convert_many.py b/nautobot_golden_config/migrations/0007_multi_repo_support_convert_many.py new file mode 100644 index 00000000..c10183eb --- /dev/null +++ b/nautobot_golden_config/migrations/0007_multi_repo_support_convert_many.py @@ -0,0 +1,70 @@ +# Generated by Django 3.1.13 on 2021-12-07 19:19 + +from django.db import migrations, 
models + + +def convert_many_repos(apps, schema_editor): + """ + Add the temp `backup_repositories` and `intended_repositories` objects + back to the new updated attribute with many-to-many relationships. + """ + GoldenConfigSetting = apps.get_model("nautobot_golden_config", "GoldenConfigSetting") + + settings_obj = GoldenConfigSetting.objects.first() + if settings_obj.backup_repositories.all(): + [settings_obj.backup_repository.add(backup_repo) for backup_repo in settings_obj.backup_repositories.all()] + if settings_obj.intended_repositories.all(): + [ + settings_obj.intended_repository.add(intended_repo) + for intended_repo in settings_obj.intended_repositories.all() + ] + + +class Migration(migrations.Migration): + + dependencies = [ + ("extras", "0013_default_fallback_value_computedfield"), + ("nautobot_golden_config", "0006_multi_repo_support_temp_field"), + ] + + operations = [ + migrations.AddField( + model_name="goldenconfigsetting", + name="backup_match_rule", + field=models.CharField(blank=True, max_length=255), + ), + migrations.AddField( + model_name="goldenconfigsetting", + name="intended_match_rule", + field=models.CharField(blank=True, max_length=255), + ), + migrations.RemoveField( + model_name="goldenconfigsetting", + name="backup_repository", + ), + migrations.AddField( + model_name="goldenconfigsetting", + name="backup_repository", + field=models.ManyToManyField( + blank=True, + limit_choices_to={"provided_contents__contains": "nautobot_golden_config.backupconfigs"}, + related_name="backup_repository", + to="extras.GitRepository", + ), + ), + migrations.RemoveField( + model_name="goldenconfigsetting", + name="intended_repository", + ), + migrations.AddField( + model_name="goldenconfigsetting", + name="intended_repository", + field=models.ManyToManyField( + blank=True, + limit_choices_to={"provided_contents__contains": "nautobot_golden_config.intendedconfigs"}, + related_name="intended_repository", + to="extras.GitRepository", + ), + ), + migrations.RunPython(convert_many_repos), + ] diff --git a/nautobot_golden_config/migrations/0008_multi_repo_support_final.py b/nautobot_golden_config/migrations/0008_multi_repo_support_final.py new file mode 100644 index 00000000..b5cbf8a3 --- /dev/null +++ b/nautobot_golden_config/migrations/0008_multi_repo_support_final.py @@ -0,0 +1,25 @@ +# Generated by Django 3.1.13 on 2021-12-07 19:33 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("nautobot_golden_config", "0007_multi_repo_support_convert_many"), + ] + + operations = [ + migrations.AlterModelOptions( + name="goldenconfigsetting", + options={"verbose_name": "Golden Config Setting"}, + ), + migrations.RemoveField( + model_name="goldenconfigsetting", + name="backup_repositories", + ), + migrations.RemoveField( + model_name="goldenconfigsetting", + name="intended_repositories", + ), + ] diff --git a/nautobot_golden_config/models.py b/nautobot_golden_config/models.py index cc4150cc..e0bb4c9a 100644 --- a/nautobot_golden_config/models.py +++ b/nautobot_golden_config/models.py @@ -3,7 +3,6 @@ import logging import json from deepdiff import DeepDiff - from django.db import models from django.core.exceptions import ValidationError from django.core.serializers.json import DjangoJSONEncoder @@ -24,6 +23,7 @@ from nautobot_golden_config.utilities.utils import get_platform from nautobot_golden_config.utilities.constant import PLUGIN_CFG + LOGGER = logging.getLogger(__name__) GRAPHQL_STR_START = "query ($device_id: ID!)" @@ -410,20 
+410,26 @@ def __str__(self): return f"{self.device}" +# pylint: disable=too-many-branches @extras_features( "graphql", ) class GoldenConfigSetting(PrimaryModel): """GoldenConfigSetting Model defintion. This provides global configs instead of via configs.py.""" - backup_repository = models.ForeignKey( + backup_repository = models.ManyToManyField( to="extras.GitRepository", - on_delete=models.SET_NULL, - null=True, blank=True, related_name="backup_repository", limit_choices_to={"provided_contents__contains": "nautobot_golden_config.backupconfigs"}, ) + backup_match_rule = models.CharField( + max_length=255, + null=False, + blank=True, + verbose_name="Rule to match a device to a Backup Repository.", + help_text="The Jinja path representation of a Backup Repository slug. The variable `obj` is available as the device instance object of a given device, as is the case for all Jinja templates. e.g. `backup-{{obj.site.region.slug}}`", + ) backup_path_template = models.CharField( max_length=255, null=False, @@ -431,14 +437,19 @@ class GoldenConfigSetting(PrimaryModel): verbose_name="Backup Path in Jinja Template Form", help_text="The Jinja path representation of where the backup file will be found. The variable `obj` is available as the device instance object of a given device, as is the case for all Jinja templates. e.g. `{{obj.site.slug}}/{{obj.name}}.cfg`", ) - intended_repository = models.ForeignKey( + intended_repository = models.ManyToManyField( to="extras.GitRepository", - on_delete=models.SET_NULL, - null=True, blank=True, related_name="intended_repository", limit_choices_to={"provided_contents__contains": "nautobot_golden_config.intendedconfigs"}, ) + intended_match_rule = models.CharField( + max_length=255, + null=False, + blank=True, + verbose_name="Rule to match a device to an Intended Repository.", + help_text="The Jinja path representation of a Intended Repository slug. The variable `obj` is available as the device instance object of a given device, as is the case for all Jinja templates. e.g. `intended-{{obj.site.region.slug}}`", + ) intended_path_template = models.CharField( max_length=255, null=False, @@ -486,11 +497,16 @@ def get_absolute_url(self): # pylint: disable=no-self-use def __str__(self): """Return a simple string if model is called.""" - return "Golden Config Settings" + return "Configuration Object" def delete(self, *args, **kwargs): """Enforce the singleton pattern, there is no way to delete the configurations.""" + class Meta: + """Set unique fields for model.""" + + verbose_name = "Golden Config Setting" + @classmethod def load(cls): """Enforce the singleton pattern, fail it somehow more than one instance.""" @@ -528,6 +544,26 @@ def clean(self): for key in self.scope.keys(): if key not in filterset_params: raise ValidationError({"scope": f"'{key}' is not a valid filter parameter for Device object"}) + # Backup Rule + if self.backup_repository.all().count() > 1: + if not self.backup_match_rule: + raise ValidationError( + "If you specify more than one backup repository, you must provide a backup repository matching rule template." + ) + elif self.backup_repository.all().count() == 1 and self.backup_match_rule: + raise ValidationError( + "If you configure only one backup repository, there is no need to specify the backup repository matching rule template." 
+ ) + # Intended Rule + if self.intended_repository.all().count() > 1: + if not self.intended_match_rule: + raise ValidationError( + "If you specify more than one intended repository, you must provide a intended repository matching rule template." + ) + elif self.intended_repository.all().count() == 1 and self.intended_match_rule: + raise ValidationError( + "If you configure only one intended repository, there is no need to specify the intended repository matching rule template." + ) def get_queryset(self): """Generate a Device QuerySet from the filter.""" diff --git a/nautobot_golden_config/nornir_plays/config_backup.py b/nautobot_golden_config/nornir_plays/config_backup.py index 4d8bdc0e..8cd9b8f8 100644 --- a/nautobot_golden_config/nornir_plays/config_backup.py +++ b/nautobot_golden_config/nornir_plays/config_backup.py @@ -17,6 +17,7 @@ from nautobot_golden_config.utilities.helper import ( get_job_filter, + get_repository_working_dir, verify_global_settings, render_jinja_template, ) @@ -32,7 +33,7 @@ def run_backup( # pylint: disable=too-many-arguments - task: Task, logger, global_settings, remove_regex_dict, replace_regex_dict, backup_root_folder + task: Task, logger, global_settings, remove_regex_dict, replace_regex_dict ) -> Result: r"""Backup configurations to disk. @@ -54,8 +55,9 @@ def run_backup( # pylint: disable=too-many-arguments backup_obj.backup_last_attempt_date = task.host.defaults.data["now"] backup_obj.save() + backup_directory = get_repository_working_dir("backup", obj, logger, global_settings) backup_path_template_obj = render_jinja_template(obj, logger, global_settings.backup_path_template) - backup_file = os.path.join(backup_root_folder, backup_path_template_obj) + backup_file = os.path.join(backup_directory, backup_path_template_obj) if global_settings.backup_test_connectivity is not False: task.run( @@ -81,12 +83,13 @@ def run_backup( # pylint: disable=too-many-arguments backup_obj.backup_last_success_date = task.host.defaults.data["now"] backup_obj.backup_config = running_config backup_obj.save() - logger.log_success(obj, "Successfully backed up device.") + + logger.log_success(obj, "Successfully extracted running configuration from device.") return Result(host=task.host, result=running_config) -def config_backup(job_result, data, backup_root_folder): +def config_backup(job_result, data): """Nornir play to backup configurations.""" now = datetime.now() logger = NornirLogger(__name__, job_result, data.get("debug")) @@ -130,7 +133,6 @@ def config_backup(job_result, data, backup_root_folder): global_settings=global_settings, remove_regex_dict=remove_regex_dict, replace_regex_dict=replace_regex_dict, - backup_root_folder=backup_root_folder, ) logger.log_debug("Completed configuration from devices.") @@ -138,4 +140,4 @@ def config_backup(job_result, data, backup_root_folder): logger.log_failure(None, err) raise - logger.log_debug("Completed configuration from devices.") + logger.log_debug("Completed configuration backup job for devices.") diff --git a/nautobot_golden_config/nornir_plays/config_compliance.py b/nautobot_golden_config/nornir_plays/config_compliance.py index 1d594e3d..3a30ed80 100644 --- a/nautobot_golden_config/nornir_plays/config_compliance.py +++ b/nautobot_golden_config/nornir_plays/config_compliance.py @@ -20,6 +20,7 @@ from nautobot_golden_config.models import ComplianceRule, ConfigCompliance, GoldenConfigSetting, GoldenConfig from nautobot_golden_config.utilities.helper import ( get_job_filter, + get_repository_working_dir, verify_global_settings, 
render_jinja_template, ) @@ -56,8 +57,6 @@ def run_compliance( # pylint: disable=too-many-arguments,too-many-locals task: Task, logger, global_settings, - backup_root_path, - intended_root_folder, rules, ) -> Result: """Prepare data for compliance task. @@ -76,17 +75,18 @@ def run_compliance( # pylint: disable=too-many-arguments,too-many-locals compliance_obj.compliance_last_attempt_date = task.host.defaults.data["now"] compliance_obj.save() - intended_path_template_obj = render_jinja_template(obj, logger, global_settings.intended_path_template) - - intended_file = os.path.join(intended_root_folder, intended_path_template_obj) + intended_directory = get_repository_working_dir("intended", obj, logger, global_settings) + intended_path_template_obj = render_jinja_template(obj, logger, global_settings.intended_path_template) + intended_file = os.path.join(intended_directory, intended_path_template_obj) if not os.path.exists(intended_file): logger.log_failure(obj, f"Unable to locate intended file for device at {intended_file}") raise NornirNautobotException() - backup_template = render_jinja_template(obj, logger, global_settings.backup_path_template) - backup_file = os.path.join(backup_root_path, backup_template) + backup_directory = get_repository_working_dir("backup", obj, logger, global_settings) + backup_template = render_jinja_template(obj, logger, global_settings.backup_path_template) + backup_file = os.path.join(backup_directory, backup_template) if not os.path.exists(backup_file): logger.log_failure(obj, f"Unable to locate backup file for device at {backup_file}") raise NornirNautobotException() @@ -120,12 +120,12 @@ def run_compliance( # pylint: disable=too-many-arguments,too-many-locals compliance_obj.compliance_last_success_date = task.host.defaults.data["now"] compliance_obj.compliance_config = "\n".join(diff_files(backup_file, intended_file)) compliance_obj.save() - logger.log_success(obj, "Successfully tested compliance.") + logger.log_success(obj, "Successfully tested compliance job.") return Result(host=task.host) -def config_compliance(job_result, data, backup_root_path, intended_root_folder): +def config_compliance(job_result, data): """Nornir play to generate configurations.""" now = datetime.now() rules = get_rules() @@ -154,8 +154,6 @@ def config_compliance(job_result, data, backup_root_path, intended_root_folder): name="RENDER COMPLIANCE TASK GROUP", logger=logger, global_settings=global_settings, - backup_root_path=backup_root_path, - intended_root_folder=intended_root_folder, rules=rules, ) @@ -163,4 +161,4 @@ def config_compliance(job_result, data, backup_root_path, intended_root_folder): logger.log_failure(None, err) raise - logger.log_debug("Completed Compliance for devices.") + logger.log_debug("Completed compliance job for devices.") diff --git a/nautobot_golden_config/nornir_plays/config_intended.py b/nautobot_golden_config/nornir_plays/config_intended.py index cc7a433f..42078c39 100644 --- a/nautobot_golden_config/nornir_plays/config_intended.py +++ b/nautobot_golden_config/nornir_plays/config_intended.py @@ -21,6 +21,7 @@ from nautobot_golden_config.models import GoldenConfigSetting, GoldenConfig from nautobot_golden_config.utilities.helper import ( get_job_filter, + get_repository_working_dir, verify_global_settings, render_jinja_template, ) @@ -30,9 +31,12 @@ InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) LOGGER = logging.getLogger(__name__) +jinja_settings = Jinja2.get_default() +jinja_env = jinja_settings.env + def run_template( 
# pylint: disable=too-many-arguments - task: Task, logger, global_settings, nautobot_job, jinja_root_path, intended_root_folder + task: Task, logger, global_settings, nautobot_job, jinja_root_path ) -> Result: """Render Jinja Template. @@ -42,9 +46,8 @@ def run_template( # pylint: disable=too-many-arguments task (Task): Nornir task individual object logger (NornirLogger): Logger to log messages to. global_settings (GoldenConfigSetting): The settings for GoldenConfigPlugin. - nautobot_job (Result): The Nautobot Job instance being ran. + nautobot_job (Result): The the output from the Nautobot Job instance being run. jinja_root_path (str): The root path to the Jinja2 intended config file. - intended_root_folder (str): The root folder for rendered intended output configs. Returns: result (Result): Result from Nornir task @@ -57,27 +60,24 @@ def run_template( # pylint: disable=too-many-arguments intended_obj.intended_last_attempt_date = task.host.defaults.data["now"] intended_obj.save() - # Render output relative filepath and jinja template filenames - intended_output_filepath = render_jinja_template(obj, logger, global_settings.intended_path_template) - jinja_intended_template_filename = render_jinja_template(obj, logger, global_settings.jinja_path_template) + intended_directory = get_repository_working_dir("intended", obj, logger, global_settings) + intended_path_template_obj = render_jinja_template(obj, logger, global_settings.intended_path_template) + output_file_location = os.path.join(intended_directory, intended_path_template_obj) - output_file_location = os.path.join(intended_root_folder, intended_output_filepath) + jinja_template = render_jinja_template(obj, logger, global_settings.jinja_path_template) status, device_data = graph_ql_query(nautobot_job.request, obj, global_settings.sot_agg_query) if status != 200: logger.log_failure(obj, f"The GraphQL query return a status of {str(status)} with error of {str(device_data)}") raise NornirNautobotException() task.host.data.update(device_data) - jinja_settings = Jinja2.get_default() - jinja_env = jinja_settings.env - generated_config = task.run( task=dispatcher, name="GENERATE CONFIG", method="generate_config", obj=obj, logger=logger, - jinja_template=jinja_intended_template_filename, + jinja_template=jinja_template, jinja_root_path=jinja_root_path, output_file_location=output_file_location, default_drivers_mapping=get_dispatcher(), @@ -92,15 +92,14 @@ def run_template( # pylint: disable=too-many-arguments return Result(host=task.host, result=generated_config) -def config_intended(nautobot_job, data, jinja_root_path, intended_root_folder): +def config_intended(nautobot_job, data, jinja_root_path): """ Nornir play to generate configurations. Args: - nautobot_job (Result): The Nautobot Job instance being ran. + nautobot_job (Result): The Nautobot Job instance being run. data (dict): Form data from Nautobot Job. jinja_root_path (str): The root path to the Jinja2 intended config file. - intended_root_folder (str): The root folder for rendered intended output configs. Returns: None: Intended configuration files are written to filesystem. 
@@ -134,7 +133,6 @@ def config_intended(nautobot_job, data, jinja_root_path, intended_root_folder): global_settings=global_settings, nautobot_job=nautobot_job, jinja_root_path=jinja_root_path, - intended_root_folder=intended_root_folder, ) except Exception as err: diff --git a/nautobot_golden_config/templates/nautobot_golden_config/goldenconfigsetting.html b/nautobot_golden_config/templates/nautobot_golden_config/goldenconfigsetting.html index 97ff4404..549a0f6e 100644 --- a/nautobot_golden_config/templates/nautobot_golden_config/goldenconfigsetting.html +++ b/nautobot_golden_config/templates/nautobot_golden_config/goldenconfigsetting.html @@ -7,49 +7,28 @@ {% block title %}{{ object }}{% endblock %} {% block header %} -
-
- -
-
-
-
- - - - -
-
-
-
-
- {% if perms.nautobot_golden_config.change_goldenconfigsetting %} - - Edit - - {% endif %} -
-

{{ object }}

- {% include 'inc/created_updated.html' %} -
- {% custom_links object %} -
- +
+ {% if perms.nautobot_golden_config.change_goldenconfigsetting %} + + Edit + + {% endif %} +
+

Golden Configuration Settings

+{% include 'inc/created_updated.html' %} +
+ {% custom_links object %} +
+ {% endblock %} {% block content %} @@ -60,75 +39,101 @@

{{ object }}

}
-
+
Settings
- + + + + + - + - + + + + + - + - + - + - + -
Backup RepositoryBackup Repositories + {% for item in object.backup_repository.all %} {% if object.backup_repository %} - {{ object.backup_repository }} + {% include "nautobot_golden_config/manytomany.html" with url_name="extras:gitrepository" %} {% endif %} + {% endfor %} +
Backup Repository Matching Rule +
{{ object.backup_match_rule }}
Backup Path Template
{{ object.backup_path_template }}
+
{{ object.backup_path_template }}
+
Intended RepositoryIntended Repositories + {% for item in object.intended_repository.all %} {% if object.intended_repository %} - {{ object.intended_repository }} + {% include "nautobot_golden_config/manytomany.html" with url_name="extras:gitrepository" %} {% endif %} + {% endfor %} +
Intended Repository Matching Rule +
{{ object.intended_match_rule }}
Intended Path Template
{{ object.intended_path_template }}
+
{{ object.intended_path_template }}
+
Jinja Repository - {% if object.jinja_repository %} - {{ object.jinja_repository }} - {% endif %} + {% if object.jinja_repository %} + {{ object.jinja_repository }} + {% endif %}
Jinja Path Template
{{ object.jinja_path_template }}
+
{{ object.jinja_path_template }}
+
Backup Test Connectivity {% if object.backup_test_connectivity %} - - - + + + {% else %} - - - + + + {% endif %}
Scope of Devices
{{ object.scope|render_json }}
+
{{ object.scope|render_json }}
+
SoT Aggregation Query
{{ object.sot_agg_query }}
+
{{ object.sot_agg_query }}
+
-
- {% include 'inc/custom_fields_panel.html' %} - {% include 'inc/relationships_panel.html' %} +
+ {% include 'inc/custom_fields_panel.html' %} + {% include 'inc/relationships_panel.html' %} +
{% endblock %} \ No newline at end of file diff --git a/nautobot_golden_config/templates/nautobot_golden_config/manytomany.html b/nautobot_golden_config/templates/nautobot_golden_config/manytomany.html new file mode 100644 index 00000000..789a1bf8 --- /dev/null +++ b/nautobot_golden_config/templates/nautobot_golden_config/manytomany.html @@ -0,0 +1,6 @@ +{% if url_name %} + + {% endif %} + {{ item.name }} + {% if url_name %} +{% endif %} \ No newline at end of file diff --git a/nautobot_golden_config/tests/conftest.py b/nautobot_golden_config/tests/conftest.py index a09c99d2..ed2183ed 100644 --- a/nautobot_golden_config/tests/conftest.py +++ b/nautobot_golden_config/tests/conftest.py @@ -1,8 +1,9 @@ """Params for testing.""" from nautobot.dcim.models import Device, Site, Manufacturer, DeviceType, DeviceRole, Rack, RackGroup, Region, Platform from nautobot.tenancy.models import Tenant, TenantGroup -from nautobot.extras.models import Status - +from nautobot.extras.models import Status, GitRepository +from nautobot.extras.datasources.registry import get_datasource_contents +from django.utils.text import slugify from nautobot_golden_config.models import ConfigCompliance, ComplianceFeature, ComplianceRule from nautobot_golden_config.choices import ComplianceRuleTypeChoice @@ -147,6 +148,23 @@ def create_device(name="foobaz"): return device +def create_orphan_device(name="orphan"): + """Creates a Device to be used with tests.""" + parent_region, _ = Region.objects.get_or_create(name="Parent Region 4", slug="parent_region-4") + child_region, _ = Region.objects.get_or_create(name="Child Region 4", slug="child_region-4", parent=parent_region) + site, _ = Site.objects.get_or_create(name="Site 4", slug="site-4", region=child_region) + manufacturer, _ = Manufacturer.objects.get_or_create(name="Manufacturer 4", slug="manufacturer-4") + device_role, _ = DeviceRole.objects.get_or_create(name="Role 4", slug="role-4") + device_type, _ = DeviceType.objects.get_or_create( + manufacturer=manufacturer, model="Device Type 4", slug="device-type-4" + ) + platform, _ = Platform.objects.get_or_create(manufacturer=manufacturer, name="Platform 4", slug="platform-4") + device = Device.objects.create( + name=name, platform=platform, site=site, device_role=device_role, device_type=device_type + ) + return device + + def create_feature_rule_json(device, feature="foo", rule="json"): """Creates a Feature/Rule Mapping and Returns the rule.""" feature_obj, _ = ComplianceFeature.objects.get_or_create(slug=feature, name=feature) @@ -169,3 +187,113 @@ def create_config_compliance(device, compliance_rule=None, actual=None, intended intended=intended, ) return config_compliance + + +# """Fixture Models.""" +def create_git_repos() -> None: + """Create five instances of Git Repos. + + Two GitRepository objects provide Backups. + Two GitRepository objects provide Intended. + One GitRepository objects provide Jinja Templates. + The provided content is matched through a loop, in order to prevent any errors if object ID's change. 
+ """ + name = "test-backup-repo-1" + provides = "nautobot_golden_config.backupconfigs" + git_repo_1 = GitRepository( + name=name, + slug=slugify(name), + remote_url=f"http://www.remote-repo.com/{name}.git", + branch="main", + username="CoolDeveloper_1", + provided_contents=[ + entry.content_identifier + for entry in get_datasource_contents("extras.gitrepository") + if entry.content_identifier == provides + ], + ) + git_repo_1.save(trigger_resync=False) + + name = "test-backup-repo-2" + provides = "nautobot_golden_config.backupconfigs" + git_repo_2 = GitRepository( + name=name, + slug=slugify(name), + remote_url=f"http://www.remote-repo.com/{name}.git", + branch="main", + username="CoolDeveloper_1", + provided_contents=[ + entry.content_identifier + for entry in get_datasource_contents("extras.gitrepository") + if entry.content_identifier == provides + ], + ) + git_repo_2.save(trigger_resync=False) + + name = "test-intended-repo-1" + provides = "nautobot_golden_config.intendedconfigs" + git_repo_3 = GitRepository( + name=name, + slug=slugify(name), + remote_url=f"http://www.remote-repo.com/{name}.git", + branch="main", + username="CoolDeveloper_1", + provided_contents=[ + entry.content_identifier + for entry in get_datasource_contents("extras.gitrepository") + if entry.content_identifier == provides + ], + ) + git_repo_3.save(trigger_resync=False) + + name = "test-intended-repo-2" + provides = "nautobot_golden_config.intendedconfigs" + git_repo_4 = GitRepository( + name=name, + slug=slugify(name), + remote_url=f"http://www.remote-repo.com/{name}.git", + branch="main", + username="CoolDeveloper_1", + provided_contents=[ + entry.content_identifier + for entry in get_datasource_contents("extras.gitrepository") + if entry.content_identifier == provides + ], + ) + git_repo_4.save(trigger_resync=False) + + name = "test-jinja-repo-1" + provides = "nautobot_golden_config.jinjatemplate" + git_repo_5 = GitRepository( + name=name, + slug=slugify(name), + remote_url=f"http://www.remote-repo.com/{name}.git", + branch="main", + username="CoolDeveloper_1", + provided_contents=[ + entry.content_identifier + for entry in get_datasource_contents("extras.gitrepository") + if entry.content_identifier == provides + ], + ) + git_repo_5.save(trigger_resync=False) + + +def create_helper_repo(name="foobaz", provides=None): + """ + Create a backup and/or intended repo to test helper functions. 
+ """ + content_provides = f"nautobot_golden_config.{provides}" + git_repo = GitRepository( + name=name, + slug=slugify(name), + remote_url=f"http://www.remote-repo.com/{name}.git", + branch="main", + username="CoolDeveloper_1", + provided_contents=[ + entry.content_identifier + for entry in get_datasource_contents("extras.gitrepository") + if entry.content_identifier == content_provides + ], + ) + git_repo.save(trigger_resync=False) diff --git a/nautobot_golden_config/tests/forms/__init__.py b/nautobot_golden_config/tests/forms/__init__.py new file mode 100644 index 00000000..8a889353 --- /dev/null +++ b/nautobot_golden_config/tests/forms/__init__.py @@ -0,0 +1 @@ +"""Unit Tests for Nautobot Golden Config Forms.""" diff --git a/nautobot_golden_config/tests/forms/test_golden_config_settings.py b/nautobot_golden_config/tests/forms/test_golden_config_settings.py new file mode 100644 index 00000000..50a2e2a6 --- /dev/null +++ b/nautobot_golden_config/tests/forms/test_golden_config_settings.py @@ -0,0 +1,77 @@ +"""Tests for Golden Configuration Settings Form.""" + +from django.test import TestCase +from nautobot.extras.models import GitRepository +from nautobot_golden_config.forms import GoldenConfigSettingFeatureForm +from nautobot_golden_config.models import GoldenConfigSetting +from nautobot_golden_config.tests.conftest import create_git_repos, create_device_data + + +class GoldenConfigSettingFormTest(TestCase): + """Test Golden Config Setting Feature Form.""" + + def setUp(self) -> None: + """Setup test data.""" + create_git_repos() + create_device_data() + # Since we enforce a singleton pattern on this model, nuke the auto-created object. + GoldenConfigSetting.objects.all().delete() + + def test_no_query_no_scope_success(self): + """Testing GoldenConfigForm without specifying a unique scope or GraphQL Query.""" + form = GoldenConfigSettingFeatureForm( + data={ + "backup_repository": [GitRepository.objects.get(name="test-backup-repo-1"), GitRepository.objects.get(name="test-backup-repo-2")], + "backup_match_rule": "backup-{{ obj.site.region.parent.slug }}", + "backup_path_template": "{{ obj.site.region.parent.slug }}/{{obj.name}}.cfg", + "intended_repository": [GitRepository.objects.get(name="test-intended-repo-1"), GitRepository.objects.get(name="test-intended-repo-2")], + "intended_match_rule": "intended-{{ obj.site.region.parent.slug }}", + "intended_path_template": "{{ obj.site.slug }}/{{ obj.name }}.cfg", + "backup_test_connectivity": True, + } + ) + self.assertTrue(form.is_valid()) + self.assertTrue(form.save()) + + def test_clean_backup_template(self): + """Testing clean method for single backup repo with a matching pattern.""" + form = GoldenConfigSettingFeatureForm( + data={ + "backup_repository": [GitRepository.objects.get(name="test-backup-repo-2")], + "backup_match_rule": "backup-{{ obj.site.region.parent.slug }}", + "backup_path_template": "{{ obj.site.region.parent.slug }}/{{obj.name}}.cfg", + "intended_repository": [GitRepository.objects.get(name="test-intended-repo-1"), GitRepository.objects.get(name="test-intended-repo-2")], + "intended_match_rule": "intended-{{ obj.site.region.parent.slug }}", + "intended_path_template": "{{ obj.site.slug }}/{{ obj.name }}.cfg", + "backup_test_connectivity": True, + } + ) + self.assertFalse(form.is_valid()) + self.assertEqual(form.non_field_errors(), ["If you configure only one backup repository, there is no need to specify the backup repository matching rule template."]) + + def test_clean_intended_template(self): + """Testing clean 
method for single intended repo with a matching pattern.""" + form = GoldenConfigSettingFeatureForm( + data={ + "backup_repository": [GitRepository.objects.get(name="test-backup-repo-2")], + "backup_path_template": "{{ obj.site.region.parent.slug }}/{{obj.name}}.cfg", + "intended_repository": [GitRepository.objects.get(name="test-intended-repo-1")], + "intended_match_rule": "intended-{{ obj.site.region.parent.slug }}", + "intended_path_template": "{{ obj.site.slug }}/{{ obj.name }}.cfg", + "backup_test_connectivity": True, + } + ) + self.assertFalse(form.is_valid()) + self.assertEqual(form.non_field_errors(), ["If you configure only one intended repository, there is no need to specify the intended repository matching rule template."]) + + def test_clean_up(self): + """Transactional custom model, unable to use `get_or_create`. + + Delete all objects created of GitRepository type. + """ + GitRepository.objects.all().delete() + self.assertEqual(GitRepository.objects.all().count(), 0) + + # Put back a general GoldenConfigSetting object. + global_settings = GoldenConfigSetting.objects.create() + global_settings.save() diff --git a/nautobot_golden_config/tests/test_graphql.py b/nautobot_golden_config/tests/test_graphql.py index b8b3d42d..6e82faf2 100644 --- a/nautobot_golden_config/tests/test_graphql.py +++ b/nautobot_golden_config/tests/test_graphql.py @@ -118,14 +118,15 @@ def setUp(self): git_obj = GitRepository.objects.create(**item) git_obj.save() + backup_repo_list = GitRepository.objects.filter( + provided_contents__contains="nautobot_golden_config.backupconfigs" + ) + intended_repo_list = GitRepository.objects.filter( + provided_contents__contains="nautobot_golden_config.intendedconfigs" + ) + GoldenConfigSetting.objects.update( - backup_repository=GitRepository.objects.get( - provided_contents__contains="nautobot_golden_config.backupconfigs" - ), backup_path_template="test/backup", - intended_repository=GitRepository.objects.get( - provided_contents__contains="nautobot_golden_config.intendedconfigs" - ), intended_path_template="test/intended", jinja_repository=GitRepository.objects.get( provided_contents__contains="nautobot_golden_config.jinjatemplate" @@ -135,6 +136,8 @@ def setUp(self): scope={"platform": ["platform1"]}, sot_agg_query="{test_model}", ) + GoldenConfigSetting.objects.first().backup_repository.set(backup_repo_list) + GoldenConfigSetting.objects.first().intended_repository.set(intended_repo_list) self.feature1 = ComplianceFeature.objects.create( name="aaa", diff --git a/nautobot_golden_config/tests/test_models.py b/nautobot_golden_config/tests/test_models.py index f315baa1..231a7b04 100644 --- a/nautobot_golden_config/tests/test_models.py +++ b/nautobot_golden_config/tests/test_models.py @@ -1,9 +1,12 @@ """Unit tests for nautobot_golden_config models.""" +from json import loads as json_loads from django.test import TestCase -from django.core.exceptions import ValidationError from django.db.utils import IntegrityError +from django.core.exceptions import ValidationError from nautobot.dcim.models import Platform +from nautobot.extras.models import GitRepository +from nautobot_golden_config.tests.conftest import create_git_repos from nautobot_golden_config.models import ( ConfigCompliance, @@ -87,12 +90,27 @@ def setUp(self): """Get the golden config settings with the only allowed id.""" self.global_settings = GoldenConfigSetting.objects.first() + def test_absolute_url_success(self): + """Verify that get_absolute_url() returns the expected URL.""" + url_string = 
self.global_settings.get_absolute_url() + self.assertEqual(url_string, "/plugins/golden-config/setting/") + def test_bad_graphql_query(self): """Invalid graphql query.""" self.global_settings.sot_agg_query = 'devices(name:"ams-edge-01")' with self.assertRaises(ValidationError): self.global_settings.clean() + def test_bad_scope(self): + """Verify that a bad value in the scope returns the expected error.""" + self.global_settings.scope = json_loads('{"has_primary_ip": true, "role": ["Apple"]}') + with self.assertRaises(ValidationError) as error: + self.global_settings.clean() + self.assertEqual( + error.exception.messages[0], + "role: Select a valid choice. Apple is not one of the available choices.", + ) + def test_good_graphql_query_invalid_starts_with(self): """Valid graphql query, however invalid in the usage with golden config plugin.""" self.global_settings.sot_agg_query = '{devices(name:"ams-edge-01"){id}}' @@ -105,6 +123,150 @@ def test_good_graphql_query_validate_starts_with(self): self.global_settings.sot_agg_query = "query ($device_id: ID!) {device(id: $device_id) {id}}" self.assertEqual(self.global_settings.clean(), None) + def test_good_scope(self): + """Verify that the scope passes validation as expected.""" + self.global_settings.scope = json_loads('{"has_primary_ip": true}') + self.assertEqual(self.global_settings.clean(), None) + + +class GoldenConfigSettingGitModelTestCase(TestCase): + """Test GoldenConfigSetting Model.""" + + def setUp(self) -> None: + """Setup test data.""" + create_git_repos() + # Since we enforce a singleton pattern on this model, nuke the auto-created object. + GoldenConfigSetting.objects.all().delete() + + # Create fresh new object, populate accordingly. + + self.golden_config = GoldenConfigSetting.objects.create( # pylint: disable=attribute-defined-outside-init + backup_match_rule="backup-{{ obj.site.region.parent.slug }}", + backup_path_template="{{ obj.site.region.parent.slug }}/{{obj.name}}.cfg", + intended_match_rule="intended-{{ obj.site.region.parent.slug }}", + intended_path_template="{{ obj.site.slug }}/{{ obj.name }}.cfg", + backup_test_connectivity=True, + jinja_repository=GitRepository.objects.get(name="test-jinja-repo-1"), + jinja_path_template="{{ obj.platform.slug }}/main.j2", + ) + self.golden_config.backup_repository.set( + [ + GitRepository.objects.get(name="test-backup-repo-1"), + GitRepository.objects.get(name="test-backup-repo-2"), + ] + ) + self.golden_config.intended_repository.set( + [ + GitRepository.objects.get(name="test-intended-repo-1"), + GitRepository.objects.get(name="test-intended-repo-2"), + ] + ) + self.golden_config.save() + + def test_model_success(self): + """Create a new instance of the GoldenConfigSettings model.""" + + self.assertEqual(self.golden_config.backup_match_rule, "backup-{{ obj.site.region.parent.slug }}") + self.assertEqual(self.golden_config.backup_path_template, "{{ obj.site.region.parent.slug }}/{{obj.name}}.cfg") + self.assertEqual(self.golden_config.intended_match_rule, "intended-{{ obj.site.region.parent.slug }}") + self.assertEqual(self.golden_config.intended_path_template, "{{ obj.site.slug }}/{{ obj.name }}.cfg") + self.assertTrue(self.golden_config.backup_test_connectivity) + self.assertEqual(self.golden_config.jinja_repository, GitRepository.objects.get(name="test-jinja-repo-1")) + self.assertEqual(self.golden_config.jinja_path_template, "{{ obj.platform.slug }}/main.j2") + self.assertEqual( + self.golden_config.backup_repository.first(), 
GitRepository.objects.get(name="test-backup-repo-1") + ) + self.assertEqual( + self.golden_config.backup_repository.last(), GitRepository.objects.get(name="test-backup-repo-2") + ) + self.assertEqual( + self.golden_config.intended_repository.first(), GitRepository.objects.get(name="test-intended-repo-1") + ) + self.assertEqual( + self.golden_config.intended_repository.last(), GitRepository.objects.get(name="test-intended-repo-2") + ) + + def test_clean_gc_model_backup_repo(self): + """Ensure we raise `ValidationError` on `GoldenConfigSetting` model. + + Leave just 1 backup repository, but with a matching repo rule. + """ + self.golden_config.backup_repository.first().delete() + with self.assertRaises(ValidationError) as error: + self.golden_config.validated_save() + self.assertEqual( + error.exception.messages[0], + "If you configure only one backup repository, there is no need to specify the backup repository matching rule template.", + ) + + def test_clean_gc_model_backup_repo_missing_template(self): + """Ensure we raise `ValidationError` on `GoldenConfigSetting` model. + + 2 backup repositories, but with no matching rule. + """ + self.golden_config.backup_repository.set( + [ + GitRepository.objects.get(name="test-backup-repo-1"), + GitRepository.objects.get(name="test-backup-repo-2"), + ] + ) + self.golden_config.backup_match_rule = None + self.assertEqual(self.golden_config.backup_repository.all().count(), 2) + with self.assertRaises(ValidationError) as error: + self.golden_config.clean() + self.assertEqual( + error.exception.messages[0], + "If you specify more than one backup repository, you must provide a backup repository matching rule template.", + ) + + def test_clean_gc_model_intended_repo(self): + """Ensure we raise `ValidationError` on `GoldenConfigSetting` model. + + Leave just 1 intended repository, but with a matching repo rule. + """ + self.golden_config.intended_repository.first().delete() + with self.assertRaises(ValidationError) as error: + self.golden_config.validated_save() + self.assertEqual( + error.exception.messages[0], + "If you configure only one intended repository, there is no need to specify the intended repository matching rule template.", + ) + + def test_clean_gc_model_intended_repo_missing_template(self): + """Ensure we raise `ValidationError` on `GoldenConfigSetting` model. + + 2 intended repositories, but with no matching rule. + """ + self.golden_config.backup_repository.set( + [ + GitRepository.objects.get(name="test-intended-repo-1"), + GitRepository.objects.get(name="test-intended-repo-2"), + ] + ) + self.golden_config.intended_match_rule = None + self.assertEqual(self.golden_config.intended_repository.all().count(), 2) + with self.assertRaises(ValidationError) as error: + self.golden_config.clean() + self.assertEqual( + error.exception.messages[0], + "If you specify more than one intended repository, you must provide a intended repository matching rule template.", + ) + + def test_removing_git_repos(self): + """Ensure we can remove the Git Repository obejcts from GoldenConfigSetting.""" + GitRepository.objects.all().delete() + self.assertEqual(self.golden_config.intended_repository.count(), 0) + self.assertEqual(self.golden_config.backup_repository.count(), 0) + self.assertEqual(GoldenConfigSetting.objects.all().count(), 1) + + def test_clean_up(self): + """Delete all objects created of GitRepository type.""" + GoldenConfigSetting.objects.all().delete() + # Put back a general GoldenConfigSetting object. 
+ global_settings = GoldenConfigSetting.objects.create() + global_settings.save() + self.assertEqual(GoldenConfigSetting.objects.all().count(), 1) + class ConfigRemoveModelTestCase(TestCase): """Test ConfigRemove Model.""" diff --git a/nautobot_golden_config/tests/test_utilities/test_helpers.py b/nautobot_golden_config/tests/test_utilities/test_helpers.py index 1768b642..0052ac80 100644 --- a/nautobot_golden_config/tests/test_utilities/test_helpers.py +++ b/nautobot_golden_config/tests/test_utilities/test_helpers.py @@ -1,24 +1,47 @@ """Unit tests for nautobot_golden_config utilities helpers.""" -import unittest -from unittest.mock import patch +from unittest.mock import patch, MagicMock -from nautobot.dcim.models import Device +from django.test import TestCase from nornir_nautobot.exceptions import NornirNautobotException +from nornir_nautobot.utils.logger import NornirLogger from jinja2 import exceptions as jinja_errors -from nautobot_golden_config.utilities.helper import ( - null_to_empty, - render_jinja_template, -) + +from nautobot.dcim.models import Device +from nautobot.extras.models import GitRepository +from nautobot_golden_config.models import GoldenConfigSetting +from nautobot_golden_config.tests.conftest import create_device, create_orphan_device, create_helper_repo +from nautobot_golden_config.utilities.helper import null_to_empty, render_jinja_template, get_repository_working_dir # pylint: disable=no-self-use -class HelpersTest(unittest.TestCase): +class HelpersTest(TestCase): """Test Helper Functions.""" + def setUp(self): + """Setup a reusable mock object to pass into GitRepo.""" + self.repository_obj = MagicMock() + self.repository_obj.path = "/fake/path" + GitRepository.objects.all().delete() + create_helper_repo(name="backup-parent_region-1", provides="backupconfigs") + create_helper_repo(name="intended-parent_region-1", provides="intendedconfigs") + create_helper_repo(name="test-jinja-repo", provides="jinjatemplate") + self.global_settings = GoldenConfigSetting.objects.first() + self.global_settings.backup_repository.set([GitRepository.objects.get(name="backup-parent_region-1")]) + self.global_settings.intended_repository.set([GitRepository.objects.get(name="intended-parent_region-1")]) + self.global_settings.jinja_repository = GitRepository.objects.get(name="test-jinja-repo") + self.global_settings.backup_match_rule = "backup-{{ obj.site.region.parent.slug }}" + self.global_settings.intended_match_rule = "intended-{{ obj.site.region.parent.slug }}" + # Device.objects.all().delete() + create_device(name="test_device") + create_orphan_device(name="orphan_device") + self.job_result = MagicMock() + self.data = MagicMock() + self.logger = NornirLogger(__name__, self.job_result, self.data) + def test_null_to_empty_null(self): """Ensure None returns with empty string.""" result = null_to_empty(None) @@ -77,3 +100,47 @@ def test_render_jinja_template_exceptions_templateerror(self, template_mock, moc template_mock.side_effect = jinja_errors.TemplateRuntimeError render_jinja_template(mock_device, mock_nornir_logger, "template") mock_nornir_logger.log_failure.assert_called_once() + + def test_get_backup_repository_working_dir_success(self): + """Verify that we successfully look up the path from a provided repo object.""" + repo_type = "backup" + result = get_repository_working_dir( + repo_type, Device.objects.get(name="test_device"), self.logger, self.global_settings + ) + self.assertEqual(result, "/opt/nautobot/git/backup-parent_region-1") + + def 
test_get_intended_repository_working_dir_success(self): + """Verify that we successfully look up the path from a provided repo object.""" + repo_type = "intended" + result = get_repository_working_dir( + repo_type, Device.objects.get(name="test_device"), self.logger, self.global_settings + ) + self.assertEqual(result, "/opt/nautobot/git/intended-parent_region-1") + + def test_get_backup_repository_working_dir_no_match(self): + """Verify that we return the correct error when there is no matching backup repo.""" + repo_type = "backup" + logger = MagicMock() + result = get_repository_working_dir( + repo_type, Device.objects.get(name="orphan_device"), logger, self.global_settings + ) + self.assertEqual(result, None) + self.assertEqual(logger.log_failure.call_count, 1) + self.assertEqual( + logger.log_failure.call_args[0][1], + "There is no repository slug matching 'backup-parent_region-4' for device. Verify the matching rule and configured Git repositories.", + ) + + def test_get_intended_repository_working_dir_no_match(self): + """Verify that we return the correct error when there is no matching intended repo.""" + repo_type = "intended" + logger = MagicMock() + result = get_repository_working_dir( + repo_type, Device.objects.get(name="orphan_device"), logger, self.global_settings + ) + self.assertEqual(result, None) + self.assertEqual(logger.log_failure.call_count, 1) + self.assertEqual( + logger.log_failure.call_args[0][1], + "There is no repository slug matching 'intended-parent_region-4' for device. Verify the matching rule and configured Git repositories.", + ) diff --git a/nautobot_golden_config/utilities/helper.py b/nautobot_golden_config/utilities/helper.py index f849d54c..8b8f8f7e 100644 --- a/nautobot_golden_config/utilities/helper.py +++ b/nautobot_golden_config/utilities/helper.py @@ -3,13 +3,17 @@ from jinja2 import exceptions as jinja_errors -from nornir_nautobot.exceptions import NornirNautobotException -from nautobot.dcim.filters import DeviceFilterSet +from django import forms +from django.conf import settings + from nautobot.dcim.models import Device +from nautobot.dcim.filters import DeviceFilterSet from nautobot.utilities.utils import render_jinja2 -from nautobot_golden_config import models +from nornir_nautobot.exceptions import NornirNautobotException +from nornir_nautobot.utils.logger import NornirLogger +from nautobot_golden_config import models FIELDS = { "platform", @@ -102,3 +106,60 @@ def render_jinja_template(obj, logger, template): ) logger.log_failure(error_msg) raise NornirNautobotException from error + + +def clean_config_settings(repo_type: str, repo_count: int, match_rule: str): + """Custom clean for `GoldenConfigSettingFeatureForm`. + + Args: + repo_type (str): `intended` or `backup`. + repo_count (int): Total number of repos. + match_rule (str): Template str provided by user to match repos. + + Raises: + ValidationError: Custom Validation on form. + """ + if repo_count > 1: + if not match_rule: + raise forms.ValidationError( + f"If you specify more than one {repo_type} repository, you must provide a {repo_type} repository matching rule template." + ) + elif repo_count == 1 and match_rule: + raise forms.ValidationError( + f"If you configure only one {repo_type} repository, there is no need to specify the {repo_type} repository matching rule template." 
+ ) + + +def get_repository_working_dir( + repo_type: str, + obj: Device, + logger: NornirLogger, + global_settings: models.GoldenConfigSetting, +) -> str: + """Match the Device to a repository working directory, based on the repository matching rule. + + Assume that the working directory == the slug of the repo. + + Args: + repo_type (str): Either `intended` or `backup` repository + obj (Device): Django ORM Device object. + logger (NornirLogger): Logger object + global_settings (models.GoldenConfigSetting): Golden Config global settings. + + Returns: + str: The local filesystem working directory corresponding to the repo slug. + """ + match_rule = getattr(global_settings, f"{repo_type}_match_rule") + + if not match_rule: + return global_settings.backup_repository.first().filesystem_path + + desired_repository_slug = render_jinja_template(obj, logger, match_rule) + matching_repo = getattr(global_settings, f"{repo_type}_repository").filter(slug=desired_repository_slug) + if len(matching_repo) == 1: + return f"{settings.GIT_ROOT}/{matching_repo[0].slug}" + logger.log_failure( + obj, + f"There is no repository slug matching '{desired_repository_slug}' for device. Verify the matching rule and configured Git repositories.", + ) + return None diff --git a/nautobot_golden_config/views.py b/nautobot_golden_config/views.py index 01685413..c309bd79 100644 --- a/nautobot_golden_config/views.py +++ b/nautobot_golden_config/views.py @@ -188,7 +188,7 @@ def alter_queryset(self, request): return pivot( self.queryset, ["device", "device__name"], - "rule__feature__slug", + "rule__feature__name", "compliance_int", aggregation=Max, ) diff --git a/pyproject.toml b/pyproject.toml index 4b8cec3e..8395dbb6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -68,7 +68,7 @@ exclude = ''' | dist )/ | settings.py # This is where you define files that should not be stylized by black - # the root of the project + # the root of the project ) ''' [build-system] @@ -107,4 +107,4 @@ disable = """, notes = """, FIXME, XXX, - """ \ No newline at end of file + """ diff --git a/tasks.py b/tasks.py index 75f31069..3b9eb6bc 100644 --- a/tasks.py +++ b/tasks.py @@ -158,6 +158,13 @@ def stop(context): docker_compose(context, "down") +@task +def restart(context): + """Gracefully restart all containers.""" + print("Restarting Nautobot...") + docker_compose(context, "restart") + + @task def destroy(context): """Destroy all containers and volumes. @@ -233,15 +240,14 @@ def makemigrations(context, name=""): # TESTS / LINTING # ------------------------------------------------------------------------------ @task -def unittest(context): +def unittest(context, label="nautobot_golden_config"): """Run Django unit tests for the plugin. Args: context (obj): Used to run specific commands - nautobot_ver (str): Nautobot version to use to build the container - python_ver (str): Will use the Python version docker image to build from + label (str): Specify a directory or module to test instead of running all Nautobot Golden Config tests. 
""" - command = "nautobot-server test nautobot_golden_config" + command = f"nautobot-server test {label}" run_command(context, command) From 582f5c4a1959d4350421b3a02b5ca164e75addbe Mon Sep 17 00:00:00 2001 From: mitchell-foxworth <71849794+mitchell-foxworth@users.noreply.github.com> Date: Tue, 21 Dec 2021 14:55:38 -0500 Subject: [PATCH 16/36] Fixes #192: fix empty intended state repo and false compliance check issue (#193) * fix empty intended state repo and false compliance check --- nautobot_golden_config/utilities/helper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_golden_config/utilities/helper.py b/nautobot_golden_config/utilities/helper.py index 8b8f8f7e..eb0491d2 100644 --- a/nautobot_golden_config/utilities/helper.py +++ b/nautobot_golden_config/utilities/helper.py @@ -152,7 +152,7 @@ def get_repository_working_dir( match_rule = getattr(global_settings, f"{repo_type}_match_rule") if not match_rule: - return global_settings.backup_repository.first().filesystem_path + return getattr(global_settings, f"{repo_type}_repository").first().filesystem_path desired_repository_slug = render_jinja_template(obj, logger, match_rule) matching_repo = getattr(global_settings, f"{repo_type}_repository").filter(slug=desired_repository_slug) From b3715e89c1f5f67468b22b9212d0bd5054f021ee Mon Sep 17 00:00:00 2001 From: Mikhail Yohman Date: Thu, 23 Dec 2021 09:18:51 -0700 Subject: [PATCH 17/36] Detailed Error Handling in get_job_filter helper (#194) * Update tasks nautobot version to 1.2.1. Update get_job_filter to raise with more detail and more checks. Add tests for get_job_filter. --- .github/workflows/ci.yml | 4 +- .../tests/test_utilities/test_helpers.py | 67 ++++++++++++++++++- nautobot_golden_config/utilities/helper.py | 16 ++++- tasks.py | 2 +- 4 files changed, 80 insertions(+), 9 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 42c2d4a6..0180f784 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,6 +1,6 @@ --- name: "CI" -on: # yamllint disable +on: # yamllint disable - "push" - "pull_request" @@ -100,7 +100,7 @@ jobs: fail-fast: true matrix: python-version: ["3.6", "3.7", "3.8", "3.9"] - nautobot-version: ["1.0.1"] + nautobot-version: ["1.2.1"] runs-on: "ubuntu-20.04" env: INVOKE_NAUTOBOT_GOLDEN_CONFIG_PYTHON_VER: "${{ matrix.python-version }}" diff --git a/nautobot_golden_config/tests/test_utilities/test_helpers.py b/nautobot_golden_config/tests/test_utilities/test_helpers.py index 0052ac80..8a01dd18 100644 --- a/nautobot_golden_config/tests/test_utilities/test_helpers.py +++ b/nautobot_golden_config/tests/test_utilities/test_helpers.py @@ -8,11 +8,16 @@ from nornir_nautobot.utils.logger import NornirLogger from jinja2 import exceptions as jinja_errors -from nautobot.dcim.models import Device -from nautobot.extras.models import GitRepository +from nautobot.dcim.models import Device, Platform, Site +from nautobot.extras.models import GitRepository, Status from nautobot_golden_config.models import GoldenConfigSetting from nautobot_golden_config.tests.conftest import create_device, create_orphan_device, create_helper_repo -from nautobot_golden_config.utilities.helper import null_to_empty, render_jinja_template, get_repository_working_dir +from nautobot_golden_config.utilities.helper import ( + null_to_empty, + render_jinja_template, + get_job_filter, + get_repository_working_dir, +) # pylint: disable=no-self-use @@ -144,3 +149,59 @@ def test_get_intended_repository_working_dir_no_match(self): 
logger.log_failure.call_args[0][1], "There is no repository slug matching 'intended-parent_region-4' for device. Verify the matching rule and configured Git repositories.", ) + + def test_get_job_filter_no_data_success(self): + """Verify we get two devices returned when providing no data.""" + result = get_job_filter() + self.assertEqual(result.count(), 2) + + def test_get_job_filter_site_success(self): + """Verify we get a single device returned when providing specific site.""" + result = get_job_filter(data={"site": Site.objects.filter(slug="site-4")}) + self.assertEqual(result.count(), 1) + + def test_get_job_filter_device_object_success(self): + """Verify we get a single device returned when providing single device object.""" + result = get_job_filter(data={"device": Device.objects.get(name="test_device")}) + self.assertEqual(result.count(), 1) + + def test_get_job_filter_device_filter_success(self): + """Verify we get a single device returned when providing single device filter.""" + result = get_job_filter(data={"device": Device.objects.filter(name="test_device")}) + self.assertEqual(result.count(), 1) + + def test_get_job_filter_base_queryset_raise(self): + """Verify we get raise for having a base_qs with no objects due to bad Golden Config Setting scope.""" + Platform.objects.create(name="Placeholder Platform", slug="placeholder-platform") + golden_settings = GoldenConfigSetting.objects.first() + golden_settings.scope = {"platform": ["placeholder-platform"]} + golden_settings.validated_save() + with self.assertRaises(NornirNautobotException) as failure: + get_job_filter() + self.assertEqual( + failure.exception.args[0], + "The base queryset didn't find any devices. Please check the Golden Config Setting scope.", + ) + + def test_get_job_filter_filtered_devices_raise(self): + """Verify we get raise for having providing site that doesn't have any devices in scope.""" + Site.objects.create(name="New Site", slug="new-site", status=Status.objects.get(slug="active")) + with self.assertRaises(NornirNautobotException) as failure: + get_job_filter(data={"site": Site.objects.filter(name="New Site")}) + self.assertEqual( + failure.exception.args[0], + "The provided job parameters didn't match any devices detected by the Golden Config scope. Please check the scope defined within Golden Config Settings or select the correct job parameters to correctly match devices.", + ) + + def test_get_job_filter_device_no_platform_raise(self): + """Verify we get raise for not having a platform set on a device.""" + device = Device.objects.get(name="test_device") + device.platform = None + device.status = Status.objects.get(slug="active") + device.validated_save() + with self.assertRaises(NornirNautobotException) as failure: + get_job_filter() + self.assertEqual( + failure.exception.args[0], + "The following device(s) test_device have no platform defined. 
Platform is required.", + ) diff --git a/nautobot_golden_config/utilities/helper.py b/nautobot_golden_config/utilities/helper.py index eb0491d2..f1879999 100644 --- a/nautobot_golden_config/utilities/helper.py +++ b/nautobot_golden_config/utilities/helper.py @@ -44,12 +44,22 @@ def get_job_filter(data=None): query.update({"id": data["device"].values_list("pk", flat=True)}) base_qs = models.GoldenConfigSetting.objects.first().get_queryset() - if DeviceFilterSet(data=query, queryset=base_qs).qs.filter(platform__isnull=True).count() > 0: + if base_qs.count() == 0: raise NornirNautobotException( - f"The following device(s) {', '.join([device.name for device in DeviceFilterSet(data=query, queryset=base_qs).qs.filter(platform__isnull=True)])} have no platform defined. Platform is required." + "The base queryset didn't find any devices. Please check the Golden Config Setting scope." + ) + devices_filtered = DeviceFilterSet(data=query, queryset=base_qs) + if devices_filtered.qs.count() == 0: + raise NornirNautobotException( + "The provided job parameters didn't match any devices detected by the Golden Config scope. Please check the scope defined within Golden Config Settings or select the correct job parameters to correctly match devices." + ) + devices_no_platform = devices_filtered.qs.filter(platform__isnull=True) + if devices_no_platform.count() > 0: + raise NornirNautobotException( + f"The following device(s) {', '.join([device.name for device in devices_no_platform])} have no platform defined. Platform is required." ) - return DeviceFilterSet(data=query, queryset=base_qs).qs + return devices_filtered.qs def null_to_empty(val): diff --git a/tasks.py b/tasks.py index 3b9eb6bc..b1fef7b8 100644 --- a/tasks.py +++ b/tasks.py @@ -28,7 +28,7 @@ def is_truthy(arg): namespace.configure( { "nautobot_golden_config": { - "nautobot_ver": "1.0.1", + "nautobot_ver": "1.2.1", "project_name": "nautobot_golden_config", "python_ver": "3.7", "local": False, From fcb0d8ec81161bbd4754dc67820f2f98e48a5833 Mon Sep 17 00:00:00 2001 From: Ken Celenza Date: Thu, 23 Dec 2021 11:32:30 -0500 Subject: [PATCH 18/36] Ensure that git repos will sync when config compliance is ran (#198) * Ensure that git repos will sync when config compliance is ran --- nautobot_golden_config/jobs.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/nautobot_golden_config/jobs.py b/nautobot_golden_config/jobs.py index 66a3fc16..5c14de63 100644 --- a/nautobot_golden_config/jobs.py +++ b/nautobot_golden_config/jobs.py @@ -94,8 +94,14 @@ class Meta: @commit_check def run(self, data, commit): # pylint: disable=too-many-branches """Run config compliance report script.""" - # pylint: disable-msg=too-many-locals # pylint: disable=unused-argument + + _ = [ + git_wrapper(self, repo, "intended") + for repo in GoldenConfigSetting.objects.first().intended_repository.all() + ] + _ = [git_wrapper(self, repo, "backup") for repo in GoldenConfigSetting.objects.first().backup_repository.all()] + config_compliance(self, data) From 265d28c7a6ac701bafa094e4fae1271077aae928 Mon Sep 17 00:00:00 2001 From: mitchell-foxworth <71849794+mitchell-foxworth@users.noreply.github.com> Date: Fri, 31 Dec 2021 17:29:01 -0500 Subject: [PATCH 19/36] Fixes #196 - Update Validation for GoldenConfigSettings to Serializer and form - (#197) * Update Validation for GoldenConfigSettings to Serializer and form --- nautobot_golden_config/api/serializers.py | 27 ++ nautobot_golden_config/models.py | 21 -- .../forms/test_golden_config_settings.py | 81 ++++- 
nautobot_golden_config/tests/test_api.py | 344 +++++++++++++++++- nautobot_golden_config/tests/test_models.py | 66 ---- nautobot_golden_config/utilities/helper.py | 4 +- 6 files changed, 448 insertions(+), 95 deletions(-) diff --git a/nautobot_golden_config/api/serializers.py b/nautobot_golden_config/api/serializers.py index 0546fc67..a92c6364 100644 --- a/nautobot_golden_config/api/serializers.py +++ b/nautobot_golden_config/api/serializers.py @@ -75,6 +75,33 @@ class Meta: model = models.GoldenConfigSetting fields = "__all__" + def validate(self, data): + """Verify that the values in the GoldenConfigSetting API call make sense.""" + validation_error_list = [] + + if len(data["backup_repository"]) == 1 and data["backup_match_rule"]: + validation_error_list.append( + "If you configure only one backup repository, do not enter the backup repository matching rule template." + ) + elif len(data["backup_repository"]) > 1 and not data["backup_match_rule"]: + validation_error_list.append( + "If you specify more than one backup repository, you must provide the backup repository matching rule template." + ) + + if len(data["intended_repository"]) == 1 and data["intended_match_rule"]: + validation_error_list.append( + "If you configure only one intended repository, do not enter the intended repository matching rule template." + ) + elif len(data["intended_repository"]) > 1 and not data["intended_match_rule"]: + validation_error_list.append( + "If you specify more than one intended repository, you must provide the intended repository matching rule template." + ) + + if validation_error_list: + raise serializers.ValidationError(validation_error_list) + + return data + class ConfigRemoveSerializer(TaggedObjectSerializer, CustomFieldModelSerializer): """Serializer for ConfigRemove object.""" diff --git a/nautobot_golden_config/models.py b/nautobot_golden_config/models.py index e0bb4c9a..2b3a8a36 100644 --- a/nautobot_golden_config/models.py +++ b/nautobot_golden_config/models.py @@ -410,7 +410,6 @@ def __str__(self): return f"{self.device}" -# pylint: disable=too-many-branches @extras_features( "graphql", ) @@ -544,26 +543,6 @@ def clean(self): for key in self.scope.keys(): if key not in filterset_params: raise ValidationError({"scope": f"'{key}' is not a valid filter parameter for Device object"}) - # Backup Rule - if self.backup_repository.all().count() > 1: - if not self.backup_match_rule: - raise ValidationError( - "If you specify more than one backup repository, you must provide a backup repository matching rule template." - ) - elif self.backup_repository.all().count() == 1 and self.backup_match_rule: - raise ValidationError( - "If you configure only one backup repository, there is no need to specify the backup repository matching rule template." - ) - # Intended Rule - if self.intended_repository.all().count() > 1: - if not self.intended_match_rule: - raise ValidationError( - "If you specify more than one intended repository, you must provide a intended repository matching rule template." - ) - elif self.intended_repository.all().count() == 1 and self.intended_match_rule: - raise ValidationError( - "If you configure only one intended repository, there is no need to specify the intended repository matching rule template." 
- ) def get_queryset(self): """Generate a Device QuerySet from the filter.""" diff --git a/nautobot_golden_config/tests/forms/test_golden_config_settings.py b/nautobot_golden_config/tests/forms/test_golden_config_settings.py index 50a2e2a6..9af047ce 100644 --- a/nautobot_golden_config/tests/forms/test_golden_config_settings.py +++ b/nautobot_golden_config/tests/forms/test_golden_config_settings.py @@ -21,10 +21,16 @@ def test_no_query_no_scope_success(self): """Testing GoldenConfigForm without specifying a unique scope or GraphQL Query.""" form = GoldenConfigSettingFeatureForm( data={ - "backup_repository": [GitRepository.objects.get(name="test-backup-repo-1"), GitRepository.objects.get(name="test-backup-repo-2")], + "backup_repository": [ + GitRepository.objects.get(name="test-backup-repo-1"), + GitRepository.objects.get(name="test-backup-repo-2"), + ], "backup_match_rule": "backup-{{ obj.site.region.parent.slug }}", "backup_path_template": "{{ obj.site.region.parent.slug }}/{{obj.name}}.cfg", - "intended_repository": [GitRepository.objects.get(name="test-intended-repo-1"), GitRepository.objects.get(name="test-intended-repo-2")], + "intended_repository": [ + GitRepository.objects.get(name="test-intended-repo-1"), + GitRepository.objects.get(name="test-intended-repo-2"), + ], "intended_match_rule": "intended-{{ obj.site.region.parent.slug }}", "intended_path_template": "{{ obj.site.slug }}/{{ obj.name }}.cfg", "backup_test_connectivity": True, @@ -40,14 +46,47 @@ def test_clean_backup_template(self): "backup_repository": [GitRepository.objects.get(name="test-backup-repo-2")], "backup_match_rule": "backup-{{ obj.site.region.parent.slug }}", "backup_path_template": "{{ obj.site.region.parent.slug }}/{{obj.name}}.cfg", - "intended_repository": [GitRepository.objects.get(name="test-intended-repo-1"), GitRepository.objects.get(name="test-intended-repo-2")], + "intended_repository": [ + GitRepository.objects.get(name="test-intended-repo-1"), + GitRepository.objects.get(name="test-intended-repo-2"), + ], "intended_match_rule": "intended-{{ obj.site.region.parent.slug }}", "intended_path_template": "{{ obj.site.slug }}/{{ obj.name }}.cfg", "backup_test_connectivity": True, } ) self.assertFalse(form.is_valid()) - self.assertEqual(form.non_field_errors(), ["If you configure only one backup repository, there is no need to specify the backup repository matching rule template."]) + self.assertEqual( + form.non_field_errors(), + ["If you configure only one backup repository, do not enter the backup repository matching rule template."], + ) + + def test_clean_backup_template_missing_match_rule(self): + """Testing clean method for multiple backup repos without a matching pattern.""" + form = GoldenConfigSettingFeatureForm( + data={ + "backup_repository": [ + GitRepository.objects.get(name="test-backup-repo-1"), + GitRepository.objects.get(name="test-backup-repo-2"), + ], + "backup_match_rule": "", + "backup_path_template": "{{ obj.site.region.parent.slug }}/{{obj.name}}.cfg", + "intended_repository": [ + GitRepository.objects.get(name="test-intended-repo-1"), + GitRepository.objects.get(name="test-intended-repo-2"), + ], + "intended_match_rule": "intended-{{ obj.site.region.parent.slug }}", + "intended_path_template": "{{ obj.site.slug }}/{{ obj.name }}.cfg", + "backup_test_connectivity": True, + } + ) + self.assertFalse(form.is_valid()) + self.assertEqual( + form.non_field_errors(), + [ + "If you specify more than one backup repository, you must provide the backup repository matching rule template." 
+ ], + ) def test_clean_intended_template(self): """Testing clean method for single intended repo with a matching pattern.""" @@ -62,7 +101,39 @@ def test_clean_intended_template(self): } ) self.assertFalse(form.is_valid()) - self.assertEqual(form.non_field_errors(), ["If you configure only one intended repository, there is no need to specify the intended repository matching rule template."]) + self.assertEqual( + form.non_field_errors(), + [ + "If you configure only one intended repository, do not enter the intended repository matching rule template." + ], + ) + + def test_clean_intended_template_missing_match_rule(self): + """Testing clean method for multiple intended repos without a matching pattern.""" + form = GoldenConfigSettingFeatureForm( + data={ + "backup_repository": [ + GitRepository.objects.get(name="test-backup-repo-1"), + GitRepository.objects.get(name="test-backup-repo-2"), + ], + "backup_match_rule": "backup-{{ obj.site.region.parent.slug }}", + "backup_path_template": "{{ obj.site.region.parent.slug }}/{{obj.name}}.cfg", + "intended_repository": [ + GitRepository.objects.get(name="test-intended-repo-1"), + GitRepository.objects.get(name="test-intended-repo-2"), + ], + "intended_match_rule": "", + "intended_path_template": "{{ obj.site.slug }}/{{ obj.name }}.cfg", + "backup_test_connectivity": True, + } + ) + self.assertFalse(form.is_valid()) + self.assertEqual( + form.non_field_errors(), + [ + "If you specify more than one intended repository, you must provide the intended repository matching rule template." + ], + ) def test_clean_up(self): """Transactional custom model, unable to use `get_or_create`. diff --git a/nautobot_golden_config/tests/test_api.py b/nautobot_golden_config/tests/test_api.py index 8f3e3270..6ef0df03 100644 --- a/nautobot_golden_config/tests/test_api.py +++ b/nautobot_golden_config/tests/test_api.py @@ -1,12 +1,15 @@ """Unit tests for nautobot_golden_config.""" +from copy import deepcopy from django.contrib.auth import get_user_model from django.urls import reverse from rest_framework import status from nautobot.utilities.testing import APITestCase +from nautobot.extras.models import GitRepository +from nautobot_golden_config.models import GoldenConfigSetting -from .conftest import create_device, create_feature_rule_json, create_config_compliance +from .conftest import create_device, create_feature_rule_json, create_config_compliance, create_git_repos User = get_user_model() @@ -81,3 +84,342 @@ def test_config_compliance_post_new_json_not_compliant(self): ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertFalse(response.data["compliance"]) + + +class GoldenConfigSettingsAPITest(APITestCase): + """Verify that the combination of values in a GoldenConfigSettings object POST are valid.""" + + def setUp(self): + """Create a superuser and token for API calls.""" + super().setUp() + create_git_repos() + self.add_permissions("nautobot_golden_config.add_goldenconfigsetting") + self.add_permissions("nautobot_golden_config.change_goldenconfigsetting") + self.base_view = reverse("plugins-api:nautobot_golden_config-api:goldenconfigsetting-list") + self.data = { + "tags": [], + "computed_fields": {}, + "custom_fields": {}, + "_custom_field_data": {}, + "backup_match_rule": "backup-{{obj.site.region.parent.slug}}", + "backup_path_template": "{{obj.site.region.slug}}/{{obj.site.slug}}/{{obj.name}}.cfg", + "intended_match_rule": "intended-{{obj.site.region.parent.slug}}", + "intended_path_template": 
"{{obj.site.region.slug}}/{{obj.site.slug}}/{{obj.name}}.cfg", + "jinja_path_template": "templates/{{obj.platform.slug}}/{{obj.platform.slug}}_main.j2", + "backup_test_connectivity": False, + "scope": {"has_primary_ip": "True"}, + "sot_agg_query": "query ($device_id: ID!) {\r\n device(id: $device_id) {\r\n config_context\r\n device_role {\r\n name\r\n slug\r\n }\r\n hostname: name\r\n platform {\r\n manufacturer {\r\n name\r\n }\r\n name\r\n napalm_driver\r\n slug\r\n }\r\n primary_ip4 {\r\n address\r\n interface {\r\n name\r\n }\r\n id\r\n }\r\n site {\r\n name\r\n region {\r\n name\r\n slug\r\n parent {\r\n name\r\n slug\r\n }\r\n }\r\n slug\r\n }\r\n }\r\n}", + "jinja_repository": str(GitRepository.objects.get(name="test-jinja-repo-1").id), + "backup_repository": [ + str(GitRepository.objects.get(name="test-backup-repo-1").id), + str(GitRepository.objects.get(name="test-backup-repo-2").id), + ], + "intended_repository": [ + str(GitRepository.objects.get(name="test-intended-repo-1").id), + str(GitRepository.objects.get(name="test-intended-repo-2").id), + ], + } + # Since we enforce a singleton pattern on this model, nuke any auto-created objects. + GoldenConfigSetting.objects.all().delete() + + def test_golden_config_settings_create_1backup_with_match_rule(self): + """Verify that an invalid POST with an unnecessary match_rule returns an error.""" + bad_data = deepcopy(self.data) + bad_data["backup_repository"] = [str(GitRepository.objects.get(name="test-backup-repo-1").id)] + response = self.client.post( + self.base_view, + data=bad_data, + format="json", + **self.header, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual( + response.data["non_field_errors"][0], + "If you configure only one backup repository, do not enter the backup repository matching rule template.", + ) + self.assertEqual(GoldenConfigSetting.objects.all().count(), 0) + + def test_golden_config_settings_create_backup_match_rule_missing(self): + """Verify that an invalid POST with a missing backup_match_rule returns an error.""" + bad_data = deepcopy(self.data) + bad_data["backup_match_rule"] = "" + response = self.client.post( + self.base_view, + data=bad_data, + format="json", + **self.header, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual( + response.data["non_field_errors"][0], + "If you specify more than one backup repository, you must provide the backup repository matching rule template.", + ) + self.assertEqual(GoldenConfigSetting.objects.all().count(), 0) + + def test_golden_config_settings_create_1intended_with_match_rule(self): + """Verify that an invalid POST with an unnecessary match_rule returns an error.""" + bad_data = deepcopy(self.data) + bad_data["intended_repository"] = [str(GitRepository.objects.get(name="test-intended-repo-2").id)] + response = self.client.post( + self.base_view, + data=bad_data, + format="json", + **self.header, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual( + response.data["non_field_errors"][0], + "If you configure only one intended repository, do not enter the intended repository matching rule template.", + ) + self.assertEqual(GoldenConfigSetting.objects.all().count(), 0) + + def test_golden_config_settings_create_intended_match_rule_missing(self): + """Verify that an invalid POST with a missing intended_match_rule returns an error.""" + bad_data = deepcopy(self.data) + bad_data["intended_match_rule"] = "" + response = self.client.post( + 
self.base_view, + data=bad_data, + format="json", + **self.header, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual( + response.data["non_field_errors"][0], + "If you specify more than one intended repository, you must provide the intended repository matching rule template.", + ) + self.assertEqual(GoldenConfigSetting.objects.all().count(), 0) + + def test_golden_config_settings_create_multiple_problems(self): + """Verify that an invalid POST with multiple problems return multiple, correct errors.""" + bad_data = deepcopy(self.data) + bad_data["backup_repository"] = [str(GitRepository.objects.get(name="test-backup-repo-1").id)] + bad_data["intended_match_rule"] = "" + response = self.client.post( + self.base_view, + data=bad_data, + format="json", + **self.header, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual( + response.data["non_field_errors"][0], + "If you configure only one backup repository, do not enter the backup repository matching rule template.", + ) + self.assertEqual( + response.data["non_field_errors"][1], + "If you specify more than one intended repository, you must provide the intended repository matching rule template.", + ) + self.assertEqual(GoldenConfigSetting.objects.all().count(), 0) + + def test_golden_config_settings_create_good(self): + """Test a POST with good values.""" + response = self.client.post( + self.base_view, + data=self.data, + format="json", + **self.header, + ) + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertTrue(response.data["created"]) + self.assertTrue(response.data["id"]) + self.assertEqual(response.data["backup_match_rule"], "backup-{{obj.site.region.parent.slug}}") + self.assertEqual( + response.data["backup_path_template"], "{{obj.site.region.slug}}/{{obj.site.slug}}/{{obj.name}}.cfg" + ) + self.assertEqual(response.data["intended_match_rule"], "intended-{{obj.site.region.parent.slug}}") + self.assertEqual( + response.data["intended_path_template"], "{{obj.site.region.slug}}/{{obj.site.slug}}/{{obj.name}}.cfg" + ) + self.assertEqual( + response.data["jinja_path_template"], "templates/{{obj.platform.slug}}/{{obj.platform.slug}}_main.j2" + ) + self.assertFalse(response.data["backup_test_connectivity"]) + self.assertEqual(response.data["scope"], {"has_primary_ip": "True"}) + self.assertEqual( + response.data["sot_agg_query"], + "query ($device_id: ID!) 
{\r\n device(id: $device_id) {\r\n config_context\r\n device_role {\r\n name\r\n slug\r\n }\r\n hostname: name\r\n platform {\r\n manufacturer {\r\n name\r\n }\r\n name\r\n napalm_driver\r\n slug\r\n }\r\n primary_ip4 {\r\n address\r\n interface {\r\n name\r\n }\r\n id\r\n }\r\n site {\r\n name\r\n region {\r\n name\r\n slug\r\n parent {\r\n name\r\n slug\r\n }\r\n }\r\n slug\r\n }\r\n }\r\n}", + ) + self.assertEqual(response.data["jinja_repository"], GitRepository.objects.get(name="test-jinja-repo-1").id) + self.assertEqual( + response.data["backup_repository"], + [ + GitRepository.objects.get(name="test-backup-repo-1").id, + GitRepository.objects.get(name="test-backup-repo-2").id, + ], + ) + self.assertEqual( + response.data["intended_repository"], + [ + GitRepository.objects.get(name="test-intended-repo-1").id, + GitRepository.objects.get(name="test-intended-repo-2").id, + ], + ) + # Clean up + GoldenConfigSetting.objects.all().delete() + self.assertEqual(GoldenConfigSetting.objects.all().count(), 0) + + def test_golden_config_settings_update_good(self): + """Verify a PUT to the valid settings object, with valid but changed values.""" + response_post = self.client.post( + self.base_view, + data=self.data, + format="json", + **self.header, + ) + new_data = deepcopy(self.data) + new_data["backup_repository"] = [str(GitRepository.objects.get(name="test-backup-repo-1").id)] + new_data["backup_match_rule"] = "" + response = self.client.put( + f"{self.base_view}{response_post.data['id']}/", + data=new_data, + format="json", + **self.header, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["backup_match_rule"], "") + self.assertEqual( + response.data["backup_path_template"], "{{obj.site.region.slug}}/{{obj.site.slug}}/{{obj.name}}.cfg" + ) + self.assertEqual(response.data["intended_match_rule"], "intended-{{obj.site.region.parent.slug}}") + self.assertEqual( + response.data["intended_path_template"], "{{obj.site.region.slug}}/{{obj.site.slug}}/{{obj.name}}.cfg" + ) + self.assertEqual( + response.data["jinja_path_template"], "templates/{{obj.platform.slug}}/{{obj.platform.slug}}_main.j2" + ) + self.assertFalse(response.data["backup_test_connectivity"]) + self.assertEqual(response.data["scope"], {"has_primary_ip": "True"}) + self.assertEqual( + response.data["sot_agg_query"], + "query ($device_id: ID!) 
{\r\n device(id: $device_id) {\r\n config_context\r\n device_role {\r\n name\r\n slug\r\n }\r\n hostname: name\r\n platform {\r\n manufacturer {\r\n name\r\n }\r\n name\r\n napalm_driver\r\n slug\r\n }\r\n primary_ip4 {\r\n address\r\n interface {\r\n name\r\n }\r\n id\r\n }\r\n site {\r\n name\r\n region {\r\n name\r\n slug\r\n parent {\r\n name\r\n slug\r\n }\r\n }\r\n slug\r\n }\r\n }\r\n}", + ) + self.assertEqual(response.data["jinja_repository"], GitRepository.objects.get(name="test-jinja-repo-1").id) + self.assertEqual( + response.data["backup_repository"], + [ + GitRepository.objects.get(name="test-backup-repo-1").id, + ], + ) + self.assertEqual( + response.data["intended_repository"], + [ + GitRepository.objects.get(name="test-intended-repo-1").id, + GitRepository.objects.get(name="test-intended-repo-2").id, + ], + ) + # Clean up + GoldenConfigSetting.objects.all().delete() + self.assertEqual(GoldenConfigSetting.objects.all().count(), 0) + + def test_golden_config_settings_update_1backup_with_match_rule(self): + """Verify a PUT to the valid settings object, with an invalid backup repo set, returns a 400.""" + response_post = self.client.post( + self.base_view, + data=self.data, + format="json", + **self.header, + ) + bad_data = deepcopy(self.data) + bad_data["backup_repository"] = [str(GitRepository.objects.get(name="test-backup-repo-1").id)] + response = self.client.put( + f"{self.base_view}{response_post.data['id']}/", + data=bad_data, + format="json", + **self.header, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual( + response.data["non_field_errors"][0], + "If you configure only one backup repository, do not enter the backup repository matching rule template.", + ) + # Clean up + GoldenConfigSetting.objects.all().delete() + self.assertEqual(GoldenConfigSetting.objects.all().count(), 0) + + def test_golden_config_settings_update_backup_match_rule_missing(self): + """Verify a PUT to the valid settings object, with an invalid backup repo set, returns a 400.""" + response_post = self.client.post( + self.base_view, + data=self.data, + format="json", + **self.header, + ) + bad_data = deepcopy(self.data) + bad_data["backup_match_rule"] = "" + response = self.client.put( + f"{self.base_view}{response_post.data['id']}/", + data=bad_data, + format="json", + **self.header, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual( + response.data["non_field_errors"][0], + "If you specify more than one backup repository, you must provide the backup repository matching rule template.", + ) + # Clean up + GoldenConfigSetting.objects.all().delete() + self.assertEqual(GoldenConfigSetting.objects.all().count(), 0) + + def test_golden_config_settings_update_1intended_with_match_rule(self): + """Verify a PUT to the valid settings object, with an invalid intended repo set, returns a 400.""" + response_post = self.client.post( + self.base_view, + data=self.data, + format="json", + **self.header, + ) + bad_data = deepcopy(self.data) + bad_data["intended_repository"] = [str(GitRepository.objects.get(name="test-intended-repo-1").id)] + response = self.client.put( + f"{self.base_view}{response_post.data['id']}/", + data=bad_data, + format="json", + **self.header, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual( + response.data["non_field_errors"][0], + "If you configure only one intended repository, do not enter the intended repository matching rule template.", + ) + # Clean up + 
GoldenConfigSetting.objects.all().delete() + self.assertEqual(GoldenConfigSetting.objects.all().count(), 0) + + def test_golden_config_settings_update_intended_match_rule_missing(self): + """Verify a PUT to the valid settings object, with an invalid intended repo set, returns a 400.""" + response_post = self.client.post( + self.base_view, + data=self.data, + format="json", + **self.header, + ) + bad_data = deepcopy(self.data) + bad_data["intended_match_rule"] = "" + response = self.client.put( + f"{self.base_view}{response_post.data['id']}/", + data=bad_data, + format="json", + **self.header, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual( + response.data["non_field_errors"][0], + "If you specify more than one intended repository, you must provide the intended repository matching rule template.", + ) + # Clean up + GoldenConfigSetting.objects.all().delete() + self.assertEqual(GoldenConfigSetting.objects.all().count(), 0) + + def test_settings_api_clean_up(self): + """Transactional custom model, unable to use `get_or_create`. + + Delete all objects created of GitRepository type. + """ + GitRepository.objects.all().delete() + self.assertEqual(GitRepository.objects.all().count(), 0) + + # Put back a general GoldenConfigSetting object. + global_settings = GoldenConfigSetting.objects.create() + global_settings.save() diff --git a/nautobot_golden_config/tests/test_models.py b/nautobot_golden_config/tests/test_models.py index 231a7b04..86b60203 100644 --- a/nautobot_golden_config/tests/test_models.py +++ b/nautobot_golden_config/tests/test_models.py @@ -186,72 +186,6 @@ def test_model_success(self): self.golden_config.intended_repository.last(), GitRepository.objects.get(name="test-intended-repo-2") ) - def test_clean_gc_model_backup_repo(self): - """Ensure we raise `ValidationError` on `GoldenConfigSetting` model. - - Leave just 1 backup repository, but with a matching repo rule. - """ - self.golden_config.backup_repository.first().delete() - with self.assertRaises(ValidationError) as error: - self.golden_config.validated_save() - self.assertEqual( - error.exception.messages[0], - "If you configure only one backup repository, there is no need to specify the backup repository matching rule template.", - ) - - def test_clean_gc_model_backup_repo_missing_template(self): - """Ensure we raise `ValidationError` on `GoldenConfigSetting` model. - - 2 backup repositories, but with no matching rule. - """ - self.golden_config.backup_repository.set( - [ - GitRepository.objects.get(name="test-backup-repo-1"), - GitRepository.objects.get(name="test-backup-repo-2"), - ] - ) - self.golden_config.backup_match_rule = None - self.assertEqual(self.golden_config.backup_repository.all().count(), 2) - with self.assertRaises(ValidationError) as error: - self.golden_config.clean() - self.assertEqual( - error.exception.messages[0], - "If you specify more than one backup repository, you must provide a backup repository matching rule template.", - ) - - def test_clean_gc_model_intended_repo(self): - """Ensure we raise `ValidationError` on `GoldenConfigSetting` model. - - Leave just 1 intended repository, but with a matching repo rule. 
- """ - self.golden_config.intended_repository.first().delete() - with self.assertRaises(ValidationError) as error: - self.golden_config.validated_save() - self.assertEqual( - error.exception.messages[0], - "If you configure only one intended repository, there is no need to specify the intended repository matching rule template.", - ) - - def test_clean_gc_model_intended_repo_missing_template(self): - """Ensure we raise `ValidationError` on `GoldenConfigSetting` model. - - 2 intended repositories, but with no matching rule. - """ - self.golden_config.backup_repository.set( - [ - GitRepository.objects.get(name="test-intended-repo-1"), - GitRepository.objects.get(name="test-intended-repo-2"), - ] - ) - self.golden_config.intended_match_rule = None - self.assertEqual(self.golden_config.intended_repository.all().count(), 2) - with self.assertRaises(ValidationError) as error: - self.golden_config.clean() - self.assertEqual( - error.exception.messages[0], - "If you specify more than one intended repository, you must provide a intended repository matching rule template.", - ) - def test_removing_git_repos(self): """Ensure we can remove the Git Repository obejcts from GoldenConfigSetting.""" GitRepository.objects.all().delete() diff --git a/nautobot_golden_config/utilities/helper.py b/nautobot_golden_config/utilities/helper.py index f1879999..3bc27cfe 100644 --- a/nautobot_golden_config/utilities/helper.py +++ b/nautobot_golden_config/utilities/helper.py @@ -132,11 +132,11 @@ def clean_config_settings(repo_type: str, repo_count: int, match_rule: str): if repo_count > 1: if not match_rule: raise forms.ValidationError( - f"If you specify more than one {repo_type} repository, you must provide a {repo_type} repository matching rule template." + f"If you specify more than one {repo_type} repository, you must provide the {repo_type} repository matching rule template." ) elif repo_count == 1 and match_rule: raise forms.ValidationError( - f"If you configure only one {repo_type} repository, there is no need to specify the {repo_type} repository matching rule template." + f"If you configure only one {repo_type} repository, do not enter the {repo_type} repository matching rule template." ) From 872cdad308d919b09049d228ca13437f08a048b9 Mon Sep 17 00:00:00 2001 From: Christian Adell Date: Mon, 14 Feb 2022 20:43:02 +0100 Subject: [PATCH 20/36] Add Git datasource to load GC properties (#206) * Add Git datasource to load GC properties --- docs/quick-start.md | 187 +++++++++++++----- nautobot_golden_config/datasources.py | 185 +++++++++++++++++ .../tests/test_datasources.py | 94 +++++++++ 3 files changed, 419 insertions(+), 47 deletions(-) create mode 100644 nautobot_golden_config/tests/test_datasources.py diff --git a/docs/quick-start.md b/docs/quick-start.md index baa2a4d2..fda9437b 100644 --- a/docs/quick-start.md +++ b/docs/quick-start.md @@ -8,34 +8,34 @@ Follow the steps below to get up and running for the configuration backup element of the plugin. -1. Enable the feature in the `PLUGIN_SETTINGS`. The configuration should have `"enable_backup": True` set in the `PLUGINS_CONFIG` dictionary for `nautobot_golden_config`. +1. Enable the feature in the `PLUGIN_SETTINGS`. The configuration should have `"enable_backup": True` set in the `PLUGINS_CONFIG` dictionary for `nautobot_golden_config`. 2. Add any git repositories that will be used to house the backup configurations. - 1. In the UI `Extensibility -> Git Repositories`. Click Add. - 2. Populate the Git Repository data for the backup. 
[Git Settings](./navigating-golden.md#git-settings) - 3. Make sure to select the **Provides** called `backup configs`. - 4. Click Create. + 1. In the UI `Extensibility -> Git Repositories`. Click Add. + 2. Populate the Git Repository data for the backup. [Git Settings](./navigating-golden.md#git-settings) + 3. Make sure to select the **Provides** called `backup configs`. + 4. Click Create. 3. Next, make sure to update the Plugins **Settings** with the backup details. - 1. Navigate to `Plugins -> Settings` under the Golden Configuration Section. - 2. Fill out the Backup Repository. (The dropdown will show the repository that was just created.) - 3. Fill out Backup Path Template. Typically `{{obj.site.slug}}/{{obj.name}}.cfg`, see [Setting Details](./navigating-golden.md#application-settings) - 4. Select whether or not to do a connectivity check per device. - 5. Click Save. + 1. Navigate to `Plugins -> Settings` under the Golden Configuration Section. + 2. Fill out the Backup Repository. (The dropdown will show the repository that was just created.) + 3. Fill out Backup Path Template. Typically `{{obj.site.slug}}/{{obj.name}}.cfg`, see [Setting Details](./navigating-golden.md#application-settings) + 4. Select whether or not to do a connectivity check per device. + 5. Click Save. 4. Create Configuration Removals and Replacements. - 1. [Config Removals](./navigating-backup.md#config-removals) - 2. [Config Replacements](./navigating-backup.md#config-replacements) + 1. [Config Removals](./navigating-backup.md#config-removals) + 2. [Config Replacements](./navigating-backup.md#config-replacements) 5. Execute the Backup. - 1. Navigate to `Plugins -> Home` under the Golden Configuration Section. - 2. Click on the `Execute` button and select `Backup`. - 3. Select what to run the backup on. - 4. Run the Job by clicking "Run Job" button. + 1. Navigate to `Plugins -> Home` under the Golden Configuration Section. + 2. Click on the `Execute` button and select `Backup`. + 3. Select what to run the backup on. + 4. Run the Job by clicking "Run Job" button. > For in-depth details see [Navigating Backup](./navigating-backup.md) @@ -45,41 +45,41 @@ Follow the steps below to get up and running for the intended configuration elem > Notice: Intended Configuration requires the `enable_intended` and `enabled_sotAgg` plugin features to be used. -1. Enable the feature in the `PLUGIN_SETTINGS`. The configuration should have `"enable_intended": True` set in the `PLUGINS_CONFIG` dictionary for `nautobot_golden_config`. +1. Enable the feature in the `PLUGIN_SETTINGS`. The configuration should have `"enable_intended": True` set in the `PLUGINS_CONFIG` dictionary for `nautobot_golden_config`. 2. Add any git repositories that will be used to house the intended configurations. - 1. In the UI `Extensibility -> Git Repositories`. Click Add. - 2. Populate the Git Repository data for the intended. [Git Settings](./navigating-golden.md#git-settings) - 3. Make sure to select the **Provides** called `intended configs`. - 4. Click Create. + 1. In the UI `Extensibility -> Git Repositories`. Click Add. + 2. Populate the Git Repository data for the intended. [Git Settings](./navigating-golden.md#git-settings) + 3. Make sure to select the **Provides** called `intended configs`. + 4. Click Create. 3. Add the git repository that will be used to house the Jinja2 templates. - 1. In the UI `Extensibility -> Git Repositories`. Click Add. - 2. Populate the Git Repository data for the jinja2 templates. 
[Git Settings](./navigating-golden.md#git-settings) - 3. Make sure to select the **Provides** called `jinja templates`. - 4. Click Create. + 1. In the UI `Extensibility -> Git Repositories`. Click Add. + 2. Populate the Git Repository data for the jinja2 templates. [Git Settings](./navigating-golden.md#git-settings) + 3. Make sure to select the **Provides** called `jinja templates`. + 4. Click Create. 4. Next, make sure to update the Plugins **Settings** with the intended and jinja2 template details. - 1. Navigate to `Plugins -> Settings` under the Golden Configuration Section. - 2. Fill out the Intended Repository. (The dropdown will show the repository that was just created.) - 3. Fill out Intended Path Template. Typically `{{obj.site.slug}}/{{obj.name}}.cfg`, see [Setting Details](./navigating-golden.md#application-settings) - 4. Fill out Jinja Repository. (The dropdown will show the repository that was just created.) - 5. Fill out Jinja Path Template. Typically `{{obj.platform.slug}}.j2`. + 1. Navigate to `Plugins -> Settings` under the Golden Configuration Section. + 2. Fill out the Intended Repository. (The dropdown will show the repository that was just created.) + 3. Fill out Intended Path Template. Typically `{{obj.site.slug}}/{{obj.name}}.cfg`, see [Setting Details](./navigating-golden.md#application-settings) + 4. Fill out Jinja Repository. (The dropdown will show the repository that was just created.) + 5. Fill out Jinja Path Template. Typically `{{obj.platform.slug}}.j2`. -4. Determine what data(variables) the Jinja2 templates need from Nautobot. +5. Determine what data(variables) the Jinja2 templates need from Nautobot. - 1. See [Source of Truth Agg Details](./navigating-sot-agg.md) - 2. Populate the SoTAgg field in the `Plugin -> Settings`. + 1. See [Source of Truth Agg Details](./navigating-sot-agg.md) + 2. Populate the SoTAgg field in the `Plugin -> Settings`. -5. Execute the Intended. +6. Execute the Intended. - 1. Navigate to `Plugins -> Home`. - 2. Click on the `Execute` button and select `Intended`. - 3. Select what to run the intended generation on. - 4. Run the Job. + 1. Navigate to `Plugins -> Home`. + 2. Click on the `Execute` button and select `Intended`. + 3. Select what to run the intended generation on. + 4. Run the Job. > For in-depth details see [Navigating Intended](./navigating-intended.md) @@ -87,24 +87,117 @@ Follow the steps below to get up and running for the intended configuration elem Compliance requires Backups and Intended Configurations in order to be executed. -1. Enable the feature in the `PLUGIN_SETTINGS`. The configuration should have `"enable_compliance": True` set in the `PLUGINS_CONFIG` dictionary for `nautobot_golden_config`. +1. Enable the feature in the `PLUGIN_SETTINGS`. The configuration should have `"enable_compliance": True` set in the `PLUGINS_CONFIG` dictionary for `nautobot_golden_config`. 2. Follow the steps in [Backup Configuration](#backup-configuration). 3. Follow the steps in [Intended Configuration](#intended-configuration). 4. Create a Compliance Feature. - 1. Navigate to `Plugins -> Compliance Feature`. - 2. Click Add and give the feature a name. Typically this is based on the configuration snippet or section. E.g. "aaa". + 1. Navigate to `Plugins -> Compliance Feature`. + 2. Click Add and give the feature a name. Typically this is based on the configuration snippet or section. E.g. "aaa". 5. Create a Compliance Rule. - 1. Navigate to `Plugins -> Compliance Rules`. - 2. 
Click Add and populate the fields, make sure the rule is linked to the feature created previously. See [Configuration Compliance Settings](./navigating-compliance.md#configuration-compliance-settings) for details. + 1. Navigate to `Plugins -> Compliance Rules`. + 2. Click Add and populate the fields, make sure the rule is linked to the feature created previously. See [Configuration Compliance Settings](./navigating-compliance.md#configuration-compliance-settings) for details. 6. Execute Compliance Check. - 1. Navigate to `Plugins -> Configuration Compliance`. - 2. Click on the `Execute` button and select `Compliance`. - 3. Select what to run the compliance on. - 4. Run the Job. + 1. Navigate to `Plugins -> Configuration Compliance`. + 2. Click on the `Execute` button and select `Compliance`. + 3. Select what to run the compliance on. + 4. Run the Job. > For in-depth details see [Navigating Compliance](./navigating-compliance.md) + +# Load Properties from Git + +Golden Config properties include: Compliance Features, Compliance Rules, Config Removals, and Config Replacements. They can be created via the UI, API, or alternatively you can load these properties from a Git repository, defined in YAML files following the this directory structure (you can skip any of them if not apply): + +``` +├── golden_config +│ ├── compliance_features +│ ├── compliance_rules +│ ├── config_removes +│ ├── config_replaces +``` + +The files within these folders can follow any naming pattern or nested folder structure, all of them will be recursively taken into account. So it's up to you to decide how to you prefer to organize these files (within the previously stated directory structure): + +```` +├── golden_config +│ ├── compliance_features +│ │ └── all.yml +│ ├── compliance_rules +│ │ ├── my_rule_for_cisco_ios +│ │ │ ├── some_rules.yml +│ │ │ └── some_other_rules.yml +│ │ └── juniper_junos.yml +│ ├── config_removes +│ │ ├── cisco_ios.yml +│ │ └── juniper_junos.yml +│ ├── config_replaces +│ │ ├── cisco_ios.yml +│ │ └── juniper_junos.yml +`` + +The `YAML` files will contain all the attributes necessary to identify an object (for instance, a `ComplianceRule` is identified by the `feature_slug` and the `platform_slug` together) and the other attributes (the ones that are not used to identify the object). For example: + +`compliance_features` example: + +```yaml +--- +- name: "aaa" + slug: "aaa" + description: "aaa feature" +```` + +`compliance_rules` example: + +```yaml +--- +- feature_slug: "aaa" + platform_slug: "cisco_ios" + config_ordered: true + match_config: | + aaa + line + username + role + tacacs + config_type: "CLI" +``` + +`config_removes` example: + +```yaml +--- +- platform_slug: "cisco_ios" + name: "Build config" + regex: '^Building\s+configuration.*\n' +``` + +`config_replaces` example: + +```yaml +--- +- name: "username" + platform_slug: "cisco_ios" + description: "username" + regex: '(username\s+\S+\spassword\s+5\s+)\S+(\s+role\s+\S+)' + replace: '\1\2' +``` + +> For Foreign Key references to `ComplianceFeature` and `Platform` we use the keywords `feature_slug` and `platform_slug` respectively. + +1. Add the Git repository that will be used to sync Git properties. + + 1. In the UI `Extensibility -> Git Repositories`. Click Add. + 2. Populate the Git Repository data for the GC properties. [Git Settings](./navigating-golden.md#git-settings) + 3. Make sure to select the **Provides** called `Golden Config properties`. + 4. Click Create (This step runs an automatic sync). + +2. 
Run `sync` and all the properties will be created/updated in a declarative way and following the right order to respect the dependencies between objects. The import task will raise a `warning` if the dependencies are not available yet (for instance, a referenced `Platform` is not created), so the `sync` process will continue, and you could then fix these warnings by reviewing the mismatch (maybe creating the required object) and run the `sync` process again. + +``` + +``` diff --git a/nautobot_golden_config/datasources.py b/nautobot_golden_config/datasources.py index 8f744975..0b18852a 100644 --- a/nautobot_golden_config/datasources.py +++ b/nautobot_golden_config/datasources.py @@ -1,8 +1,13 @@ """Data source plugin extension to register additional git repo types.""" +import os +from django.db import IntegrityError +import yaml from nautobot.extras.choices import LogLevelChoices from nautobot.extras.registry import DatasourceContent +from nautobot.dcim.models.devices import Platform from nautobot_golden_config.utilities.constant import ENABLE_BACKUP, ENABLE_COMPLIANCE, ENABLE_INTENDED +from nautobot_golden_config.models import ComplianceFeature, ComplianceRule, ConfigReplace, ConfigRemove def refresh_git_jinja(repository_record, job_result, delete=False): # pylint: disable=unused-argument @@ -29,6 +34,174 @@ def refresh_git_backup(repository_record, job_result, delete=False): # pylint: ) +def refresh_git_gc_properties(repository_record, job_result, delete=False): # pylint: disable=unused-argument + """Callback for gitrepository updates on Git Configuration repo. + + Expected folder structure: + ├── golden_config + │ ├── compliance_features + │ ├── compliance_rules + │ ├── config_removes + │ ├── config_replaces + + """ + golden_config_path = os.path.join(repository_record.filesystem_path, "golden_config") + if not os.path.isdir(golden_config_path): + job_result.log( + f"Skipping sync for {golden_config_path} because directory doesn't exist.", + level_choice=LogLevelChoices.LOG_INFO, + ) + return + + # gc_config_items parametrize the method to import the different GC properties + # "directory_name": is the directory name under golden_config + # "class": is the Django model related to the property + # "id_keys": is a tuple of tuples, defining the attributes that identify an instance. The inner tuple + # defines the mapping from the YAML to the actual attribute name. 
+ gc_config_items = ( + { + "directory_name": "compliance_features", + "class": ComplianceFeature, + "id_keys": (("name", "name"),), + }, + { + "directory_name": "compliance_rules", + "class": ComplianceRule, + "id_keys": ( + ("feature", "feature_slug"), + ("platform", "platform_slug"), + ), + }, + { + "directory_name": "config_removes", + "class": ConfigRemove, + "id_keys": ( + ("name", "name"), + ("platform", "platform_slug"), + ), + }, + { + "directory_name": "config_replaces", + "class": ConfigReplace, + "id_keys": ( + ("name", "name"), + ("platform", "platform_slug"), + ), + }, + ) + + for gc_config_item in gc_config_items: + update_git_gc_properties(golden_config_path, job_result, gc_config_item) + + job_result.log( + "Successfully Completed sync of Golden Config properties", + level_choice=LogLevelChoices.LOG_SUCCESS, + ) + + +class MissingReference(Exception): + """Custom error to signal a missing FK reference when looking up.""" + + +def get_id_kwargs(gc_config_item_dict, id_keys, job_result): + """Method to get the proper id kwargs and remove them from gc_config_item_dict.""" + # fk_slug_class_mapping contains a mapping of the FK attributes to the related model + fk_slug_class_mapping = {"feature": ComplianceFeature, "platform": Platform} + + id_kwargs = {} + for id_key in id_keys: + actual_attr_name = id_key[0] + yaml_attr_name = id_key[1] + + # If the attribute is actually a FK reference, we need to resolve the related object + if actual_attr_name in fk_slug_class_mapping: + try: + id_kwargs[actual_attr_name] = fk_slug_class_mapping[actual_attr_name].objects.get( + slug=gc_config_item_dict[yaml_attr_name] + ) + except fk_slug_class_mapping[actual_attr_name].DoesNotExist: + job_result.log( + ( + f"Reference to {yaml_attr_name}: {gc_config_item_dict[yaml_attr_name]}", + "is not available.", + ), + level_choice=LogLevelChoices.LOG_WARNING, + ) + raise MissingReference from fk_slug_class_mapping[actual_attr_name].DoesNotExist + else: + id_kwargs[actual_attr_name] = gc_config_item_dict[yaml_attr_name] + + # We remove the attributes used to indentify the item from the defaults dictionary + del gc_config_item_dict[yaml_attr_name] + + return id_kwargs + + +def update_git_gc_properties(golden_config_path, job_result, gc_config_item): # pylint: disable=too-many-locals + """Refresh any compliance features provided by this Git repository.""" + gc_config_item_path = os.path.join(golden_config_path, gc_config_item["directory_name"]) + if not os.path.isdir(gc_config_item_path): + job_result.log( + f"Skipping sync for {gc_config_item['directory_name']} because directory doesn't exist.", + level_choice=LogLevelChoices.LOG_INFO, + ) + return + + property_model = gc_config_item["class"] + + job_result.log( + f"Refreshing {property_model.__name__}...", + level_choice=LogLevelChoices.LOG_INFO, + ) + + file_names = [] + for root, _, files in os.walk(gc_config_item_path): + for file_name in files: + if not any(file_name.endswith(yaml_extension) for yaml_extension in (".yml", ".yaml")): + continue + file_names.append({"root": root, "file_name": file_name}) + + for details in file_names: + root = details["root"] + file_name = details["file_name"] + + with open(os.path.join(root, file_name), "r", encoding="utf-8") as yaml_file: + try: + gc_config_item_dict = yaml.safe_load(yaml_file) + + except yaml.YAMLError as exc: + job_result.log( + f"Error loading {os.path.join(root, file_name)}: {exc}", + level_choice=LogLevelChoices.LOG_WARNING, + ) + continue + + try: + id_kwargs = 
get_id_kwargs(gc_config_item_dict, gc_config_item["id_keys"], job_result) + item, created = gc_config_item["class"].objects.update_or_create(**id_kwargs, defaults=gc_config_item_dict) + + if created: + job_result.log( + f"New {property_model.__name__} created: {item}", + level_choice=LogLevelChoices.LOG_SUCCESS, + ) + else: + job_result.log( + f"Updated {property_model.__name__}: {item}", + level_choice=LogLevelChoices.LOG_SUCCESS, + ) + + except MissingReference: + continue + + except IntegrityError as exc: + job_result.log( + f"Issue seen with attribute values: {exc}", + level_choice=LogLevelChoices.LOG_WARNING, + ) + continue + + datasource_contents = [] if ENABLE_INTENDED or ENABLE_COMPLIANCE: datasource_contents.append( @@ -66,3 +239,15 @@ def refresh_git_backup(repository_record, job_result, delete=False): # pylint: ), ) ) + +datasource_contents.append( + ( + "extras.gitrepository", + DatasourceContent( + name="Golden Config properties", + content_identifier="nautobot_golden_config.pluginproperties", + icon="mdi-file-code", + callback=refresh_git_gc_properties, + ), + ) +) diff --git a/nautobot_golden_config/tests/test_datasources.py b/nautobot_golden_config/tests/test_datasources.py new file mode 100644 index 00000000..c15415d6 --- /dev/null +++ b/nautobot_golden_config/tests/test_datasources.py @@ -0,0 +1,94 @@ +"""Unit tests for nautobot_golden_config datasources.""" + +from unittest.mock import Mock +from django.test import TestCase + +from nautobot.dcim.models import Platform +from nautobot_golden_config.models import ComplianceFeature +from nautobot_golden_config.datasources import get_id_kwargs, MissingReference + + +class GitPropertiesDatasourceTestCase(TestCase): + """Test Git GC Properties datasource.""" + + def setUp(self): + """Setup Object.""" + self.platform = Platform.objects.create(slug="example_platform") + self.compliance_feature = ComplianceFeature.objects.create(slug="example_feature") + self.job_result = Mock() + + def test_get_id_kwargs_1(self): + """Test simple get_id_kwargs 1.""" + initial_gc_config_item_dict = {"name": "some name"} + gc_config_item_dict = initial_gc_config_item_dict.copy() + id_kwargs = get_id_kwargs( + gc_config_item_dict, + (("name", "name"),), + self.job_result, + ) + self.assertEqual(id_kwargs, initial_gc_config_item_dict) + self.assertEqual(gc_config_item_dict, {}) + + def test_get_id_kwargs_2(self): + """Test simple get_id_kwargs 2.""" + gc_config_item_dict = {"name": "some name", "description": "some description"} + id_kwargs = get_id_kwargs( + gc_config_item_dict, + (("name", "name"),), + self.job_result, + ) + self.assertEqual(id_kwargs, {"name": "some name"}) + self.assertEqual(gc_config_item_dict, {"description": "some description"}) + + def test_get_id_kwargs_3(self): + """Test simple get_id_kwargs 3.""" + gc_config_item_dict = {"name": "some name", "description": "some description"} + id_kwargs = get_id_kwargs( + gc_config_item_dict, + (), + self.job_result, + ) + self.assertEqual(id_kwargs, {}) + self.assertEqual(gc_config_item_dict, gc_config_item_dict) + + def test_get_id_kwargs_4(self): + """Test simple get_id_kwargs .""" + gc_config_item_dict = {"platform_slug": "invalid_platform"} + with self.assertRaises(MissingReference): + get_id_kwargs( + gc_config_item_dict, + (("platform", "platform_slug"),), + self.job_result, + ) + + def test_get_id_kwargs_5(self): + """Test simple get_id_kwargs 5.""" + gc_config_item_dict = {"platform_slug": "example_platform"} + id_kwargs = get_id_kwargs( + gc_config_item_dict, + (("platform", 
"platform_slug"),), + self.job_result, + ) + self.assertEqual(id_kwargs, {"platform": self.platform}) + self.assertEqual(gc_config_item_dict, {}) + + def test_get_id_kwargs_6(self): + """Test simple get_id_kwargs 6.""" + gc_config_item_dict = {"feature_slug": "invalid_feature"} + with self.assertRaises(MissingReference): + get_id_kwargs( + gc_config_item_dict, + (("feature", "feature_slug"),), + self.job_result, + ) + + def test_get_id_kwargs_7(self): + """Test simple get_id_kwargs 7.""" + gc_config_item_dict = {"feature_slug": "example_feature"} + id_kwargs = get_id_kwargs( + gc_config_item_dict, + (("feature", "feature_slug"),), + self.job_result, + ) + self.assertEqual(id_kwargs, {"feature": self.compliance_feature}) + self.assertEqual(gc_config_item_dict, {}) From 2406be87a4d5a8b0e7637cb9333d33ccd8a07b2f Mon Sep 17 00:00:00 2001 From: Christian Adell Date: Thu, 17 Feb 2022 08:23:13 +0100 Subject: [PATCH 21/36] Fix issue git properties import (#209) --- nautobot_golden_config/datasources.py | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/nautobot_golden_config/datasources.py b/nautobot_golden_config/datasources.py index 0b18852a..58f8c1e7 100644 --- a/nautobot_golden_config/datasources.py +++ b/nautobot_golden_config/datasources.py @@ -131,7 +131,7 @@ def get_id_kwargs(gc_config_item_dict, id_keys, job_result): else: id_kwargs[actual_attr_name] = gc_config_item_dict[yaml_attr_name] - # We remove the attributes used to indentify the item from the defaults dictionary + # We remove the attributes used to identify the item from the defaults dictionary del gc_config_item_dict[yaml_attr_name] return id_kwargs @@ -167,7 +167,7 @@ def update_git_gc_properties(golden_config_path, job_result, gc_config_item): # with open(os.path.join(root, file_name), "r", encoding="utf-8") as yaml_file: try: - gc_config_item_dict = yaml.safe_load(yaml_file) + gc_config_property_list = yaml.safe_load(yaml_file) except yaml.YAMLError as exc: job_result.log( @@ -177,17 +177,18 @@ def update_git_gc_properties(golden_config_path, job_result, gc_config_item): # continue try: - id_kwargs = get_id_kwargs(gc_config_item_dict, gc_config_item["id_keys"], job_result) - item, created = gc_config_item["class"].objects.update_or_create(**id_kwargs, defaults=gc_config_item_dict) - - if created: - job_result.log( - f"New {property_model.__name__} created: {item}", - level_choice=LogLevelChoices.LOG_SUCCESS, + for item_dict in gc_config_property_list: + id_kwargs = get_id_kwargs(item_dict, gc_config_item["id_keys"], job_result) + item, created = gc_config_item["class"].objects.update_or_create(**id_kwargs, defaults=item_dict) + + log_message = ( + f"New {property_model.__name__} created: {item}" + if created + else f"Updated {property_model.__name__}: {item}" ) - else: + job_result.log( - f"Updated {property_model.__name__}: {item}", + log_message, level_choice=LogLevelChoices.LOG_SUCCESS, ) From f887c5512db1502b40857a153ef36dde4085fcf0 Mon Sep 17 00:00:00 2001 From: Christian Adell Date: Thu, 17 Feb 2022 16:00:06 +0100 Subject: [PATCH 22/36] Update CLI reference to lowercase (#210) --- docs/quick-start.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/quick-start.md b/docs/quick-start.md index fda9437b..c2dc1776 100644 --- a/docs/quick-start.md +++ b/docs/quick-start.md @@ -164,7 +164,7 @@ The `YAML` files will contain all the attributes necessary to identify an object username role tacacs - config_type: "CLI" + config_type: "cli" ``` `config_removes` example: 
From 1ae91c8476b8a7c932c030621a6523f67ce167b3 Mon Sep 17 00:00:00 2001 From: mzb Date: Tue, 22 Feb 2022 15:48:22 +0100 Subject: [PATCH 23/36] Multiple Golden Config Settings (#205) * Multiple GoldenConfigSetting instances --- CHANGELOG.md | 9 +- README.md | 2 + docs/img/navigate-compliance-rules.png | Bin 44839 -> 56418 bytes docs/installation.md | 2 +- docs/navigating-backup.md | 57 +-- docs/navigating-golden.md | 17 +- docs/navigating-intended.md | 2 +- docs/quick-start.md | 22 +- docs/upgrade.md | 10 + nautobot_golden_config/api/serializers.py | 27 -- nautobot_golden_config/api/views.py | 5 +- nautobot_golden_config/forms.py | 31 +- nautobot_golden_config/jobs.py | 43 +-- .../migrations/0009_multiple_gc_settings.py | 123 ++++++ nautobot_golden_config/models.py | 53 ++- nautobot_golden_config/navigation.py | 14 +- .../nornir_plays/config_backup.py | 26 +- .../nornir_plays/config_compliance.py | 32 +- .../nornir_plays/config_intended.py | 35 +- nautobot_golden_config/tables.py | 52 +++ .../goldenconfigsetting.html | 364 ++++++++++++------ .../goldenconfigsetting_edit.html | 42 ++ .../forms/test_golden_config_settings.py | 116 +----- nautobot_golden_config/tests/test_api.py | 242 +----------- nautobot_golden_config/tests/test_graphql.py | 36 +- nautobot_golden_config/tests/test_models.py | 79 ++-- .../tests/test_utilities/test_helpers.py | 125 +++--- nautobot_golden_config/urls.py | 12 +- nautobot_golden_config/utilities/helper.py | 74 +--- nautobot_golden_config/views.py | 69 ++-- 30 files changed, 828 insertions(+), 893 deletions(-) create mode 100644 docs/upgrade.md create mode 100644 nautobot_golden_config/migrations/0009_multiple_gc_settings.py create mode 100644 nautobot_golden_config/templates/nautobot_golden_config/goldenconfigsetting_edit.html diff --git a/CHANGELOG.md b/CHANGELOG.md index f3a2c824..2bd48518 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,15 +1,14 @@ # Changelog -## v1.0.0 - 2021-XX-XX +## v1.0.0 - 2022-XX-XX ### Added -- GoldenConfigSettings enforces a `template path` be provided if more than 1 backup or intended repository is configured. -- Updated backup job to execute against multiple repos if available based on pattern matching. -- Updated intended job to execute against multiple repos if available based on pattern matching. -- Updated compliance job to execute from multiple repos if available based on pattern matching. - Added utility function to determine the local filesystem path which stores the backup and intended repository files for a given device. +### Changed +- [#205](https://github.com/nautobot/nautobot-plugin-golden-config/pull/205) - Multiple Golden Config Settings allows for multiple instances of the plugin settings + ## v0.9.10 - 2021-11 ### Announcements diff --git a/README.md b/README.md index a24dc41f..44c11bc0 100644 --- a/README.md +++ b/README.md @@ -34,6 +34,8 @@ but not want to use other features, which is perfectly fine to do so. 
- [Navigating JSON Compliance](./docs/navigating-compliance-json.md) - [Navigating Custom Compliance](./docs/navigating-compliance-custom.md) - [FAQ](./docs/FAQ.md) +- [Upgrade Notes](./docs/upgrade.md) + ## Screenshots diff --git a/docs/img/navigate-compliance-rules.png b/docs/img/navigate-compliance-rules.png index da8e609fdef8e8f94cb742ec5a2fe7753e68c67d..7d5cb0dd3f544e2668f949eb02e40e9159f02c8a 100644 GIT binary patch literal 56418 zcmd?QbyQrzw>3xz1eX8-5}e@f?(Q1gf|KCxmf#RvLU0Iy;O=gXyK8WF_qiR&@4ffU zH?!8P`EQ0mX!_ndb*k#zs(to89jqWHjs*V>9s&XaNm4>Y2?FB92?PXWJnSproibdU z7Z4DLCZB|b6(og)2^H+DO+Hx|LqJFb$0fk1EB9dfY{q{;(6JyQ(#OCLFMJ+eMw#~0lRnBA-Ih1AiN$JBq)r9<}v&ZKu@Q5YrH)RVKT1A&NK2tO{3U$*mN}b>g!6F9($;! zJ+-(4(_Bp+Ho9?M`AqD=S*6~>G%H9m-YPV5GQKj<5LuW~(T0TP!fXm_WFo&E8S)Jn z*-a$Gi^9~dtzRb6MvM97?i+4voE*7)YogI(#GqHDia#=7(bgc83;VDMGe`Od<;EJS zR+nU*1Zn0qC7BpXCozV9z;8p)tmr6-Y{RsUhM+`)iA6CDyOJ$W=$Q4?}6(1*I0BSMvLzhZqcYk|)2fsQRP`SOVnMq<|eo!g(JI&$12 zUtFV?uWd#o0>pkIUPvr6zrgs)PZaE9;G*6Au^1LZ=o+N|`(_)m?3FtLjQH9Y@yJs_ zjP9?8y{(F)%QMlf!W2<=hE6ZeSW#3}miUcvDS{^@VuhVFxP*UnFc?rUMq|r4bjThF z%o9We41H8zuw^^oUfVbIYn5bn2`c!Jzdd^nCtC;SWZ^wmXHIbX0U2j{fOHchZ}NZh*PHg z2p{4{O>Fe?4s8auu1zd*k?V7PREmOKQ`%2M(Qm9v^m#LRc3Vj_iJ9Jcj zB5PLpWp3~|?hfQ<(#mkRXt$&rC28qCsXjIHFYzMgO{&?Ge1&70mlca6XP@L)=x9@Z z{iMy_PiGyo8r2=$cHnkn;Qgj6peI$Fk(R!c-8^Ap7BRW;388ZHliH^#bG=coU&14$ z6U$pNBb1ZBhdI;j1#`p`GR<@PCf0WcCkjV-f4b{b%SU8v{=&*nD6ST)j!1}chm7z)B!Z6v$;VmlC9+Jx|&R6TN+!1Gy+>p+Zc#&d| zLJ{*3Be0#A7=Pi#fcg#T3=d3o4Vwt`*}gCfu^1c&)xAy-YhZl$3B!VfiHdpF++kv? zk+F}XpJU3!-}dV<9{PMokI|PN#28sJa#=UAcVTA)l-L?3uG^EDj&5;HiV;#SbVIz z^<~d^>$*bY*c=JIMACRGLsCe~acJ?6G-`f8n{gxv%!8dg+kf8|zW~ zQRhLvYc}G|w*|HfK_3+e{*TfEri9cC?~ejo1_}EJy%ZdW??3K-)IYlT1Hu)TLpEjK z)Ce*U;#vFHjZ;M6j9kr$f$$BM7Qqr_3`H7Vi@=F}o{ERNfvthPjoLs}#GZtRiONoN z%`x?@d`tfFdHCsDc2jM03|cCp&z|G=W;tfA4r4A3cQwcPT}$Dm-=ex$I=-mu?&VE$ z+g{j?t$HO2B%`utd~VBfslJ*ywuB4w`wdr*rb`|`LMPo2w-)(R=CkCaq?~+_q*n~u zk20og`<3s(_GGEjtdem**?vWhMvqnHoXf%|RhTHL)v39Cx~c5BhzTW2OEh32weF4j z&82*;jADe!q{QT8?C}S5l7H=)*wBxZ*a%tlvGC7t2K5@vX|F2n62?qD*leN=6tz1D`tkQPU=xmU+s3PLV|9oC)Zie_2a z+5|i05(_Ph7KiupSL^ppg|riNaS6JQgOBwCp+}<8qWht|n8jLmzh%^&s^%*jGVj{a zTBFOC>OJ+Y?^{L-viJKM5-c_4wCQxbEJcSyQmD^WF^fG)KHuftOcYEQ=j# zL1C{FvG`qFm0vV1HLqE7K2o)2X~|#aT`*%+*648y!W+d);a+vFn!Fkx-(y;+EWj*O znRGgoTk^D(-%9P`N~oiCPjv6Q^!4oZ^uCKd9;>`pywyT3K^ecdxIGQ{8ur!rT;jg} zX7QD+u&Lo^Lr{iopyrReB;*ICn*_W-S`;-rMxL@giu0|ute2pWh;!DyAMW9%1m43; zmFtti!Q@31a%PtNZyit0ejh25&vICT)2s~HwKX-n5Rt-9Tu58|}p$XjhJcsyt= zTi;i2ERoIA)n~cqtQ2i8>@4AVFU@T)xUT7LHJ)CgA1t{Mx*VNltSko>`A2L;P~mIw z6uGWEtl2obTz2x4-H)7qxTt8(Ig@YXXXmkZxj5Bp^-eiVnVvh>e(?Cy8J*BY@7!?W zF)@lY<$US*u=~JnZ2fKNzystpb-}lOzxwc&K!SjZpUcbQ2IGADLg1mrCyag*1s|G0 zeWZW}LU+$+)9-_eYa{FsO3b)(Q|p)9!{1+_9?X$F+mt}v9L$!!sw+K9T`T9m2g@-IarMuSM z6#b?WabdqAQV>=9W~crqKeH*&>-Fjst%B5KzzeXac{%Uy;BM z(E9oJ%jf_I7~n57;Kwf=^55MrPSRigdkq;6T!Ro)7M7F*ek&W=85>*Kn^`+#{)FlP z-hi`_(6EPqz$69#ypU8PIRdgK`%fR#9n@uIxD2f=>GX}P4UFkrEN#GcAb4E3fR~oW z4*G;HmKIj_TrRxCfA8P|UW4DJCno%Ri-S2YvAV1Rp|G``F(E4*3mpS7A3Px;A&;Gr z373+H*mHB>FJ59Z2L~H2dU|JPXF6wQI%_*qdPYu8PI?9=dL|}X;0{`QS1Sj77g{U( z_fJOt*%2|eH?;d?LQAnB;ekEgjYYRv4?J8EE9Qc54{x}o@_tao;;SaEwkxr z+Mqm?Y(E(Cw)~3*)CVXG!Plr5sED~gBXS0vn|}N5x*U95XLpcyc62^hZoHqw z$@|sg2Zz4F6%0!fDKJ-MiLOqqA_*lT02M8oPNp%TXbHT0`VJu?6xBtfjeGOrH7fA! 
zSTDefpttfSA$TN!RaWLx*BM)v^K+O1%Q^$y+soWTa ze|v9*=G{EX)MJ)QnNR)fL$w~)VkCE2OAx8}uSX*!vL*Iz{W_J-tdw3OU|K^ciT2O8 zUv2)xE1bQ0_jBlrgP8Z_a-BQn&e2&MHy!{FNic^cS?6Nygub*nJc|Jhd@lyXPeRR$tBWEIxN+uh8J9{E@vVASrW2N*cKb zwh+A8QgnZ*RipqdQ+@M^sOH~L`@eeDA7N3AG^nxiitpM?R+DF@p}`VMwaxAbf%c#< z5ef8I07(nNL2Nm#=T`7su9ls#m(w?Z)HTC0N&TPtlp21}6Qau>1CI3Y%+D3!`dTu) z-gw9dPR}-Yqly&&)6>F#Xxc{C=oU57n4ZP;B$7-Rk)6e3p;4!{3UfG=%6&_Qq^;JA zXdn3>fiCia^A71ibJTM48!Rf(g_taX&KCl<7xei2$YG7l5u7A*^oK5gN@H5NU4aER z7V@1;Txk5MR_IkL_c)!ir-;}3wQ?=u<`o|f&dXc>fq&gcc_JlYw)8|pO|j&!eo72T zS~LZJS{uVH&u9 zJ_f#Ym$r`GRBMdgvZL%wqq4z_?ROJV%%Q@=VA=}1SQLghqO)dL9gEGdFn4mK{O@-J zs7de*7(f2m@HgRB4~MxA9jjs2V{rawNs~SQV4|REoh?dwGOqW+$X#?%Tw6INo@StZSdU&}h_HDjuKujXwk* zUkwci-H!sR$ww5SV^R__JCKAW~mGU8UM27J=?-`tgS2 zm-@1Z8QTN%Mdm#K4IP5$rMqC)?F=-y!XJRXvj8sA+l0nmh^BEV%oZ%Zk6i%FQ${$e zL#tdU8{-PaPXnfz$}3jqnw<8|K=>7h-~8>}*RS7eD76S~rWZcgE*l6BshBJhU(eYT z3kC8v(8|dNyq|naA-)LirG$%^lrwzQN4^G(klheG6=(1+>{L3Twip1nJpsAss4URk zat*VqBFgpuZsQ5CsTVto{y)A!NTJgopu-aLb*4IRSsa8`7eOIx+7LH6yE^_+=39tj z(=^OARMbLuRO8D}4^RKJmIIn0+;JwjV7&mJ(kK?G`hNzj!#my{tPQk*5abewP9kFc zB5v2yqJX_v!MtUb!)5qh45*Aig_@zyW(o0@eF;XS;FUQMhS6QK>bSLiqd9 zu>eR)QbL6`uzMb$Y6A$KStHkXbXCe(d=nk4Pi`Hx@VX8sFKOr%z2ld@qw9kKvRrya z@urZb2U-G9*U$12a5y1hb{-Vq$SlbF+(C*=xSs;r{|ESu=S%wk-&D-RHFZ?z$g5!l zwQ^^ffF_!{Kah61Wcn!3(mU%w8Afg}>nQp2oT6N5fGVNYrosMiK#{ zKsMsg$dud-KxZ$WntNcmF{j>1?IfB?tD>V*TDEfZqCc4#ZNB$rk(JS|^}>dASwi$((|9!ZBg7`6B-$bPW{|w0Cii@c>LvUgcefQf1f!=7QCeS7t z5RU!^6wCyE0YurgT1q%*{8ctg>pPa_c(~3OdFDWg-^Ch4@9ouQqOT~@w`^E2ARv!0 z$ol$`7v=DTNudckf0D@4?g@Kp(haq7jursQ__Id#0FW~`0SWhae0rb;4Mp>Je3~uo z2Ej!Kc)vgW)<5_X)RfpauY~<-8K#xLm%Iu-+fFWWlVmKy2e}Cabcd`!y~2Zgm_vH9 z(n4HEan9=ulnxxvz()9?2un8*(aPpQ_HhxYI9sYQB;5V4(_p8dQ)j>dnw38;egZ;N z8+fg;J;1Pv@K3!dG6sGt%)YtTGkEXLtF2t0R;W9;cLhXYe^_L201!bPLkYBMu7+MXjH;d7ppf|_cnAnVRcpe% z+7D1zVvSa<<9}1d#jB+cR)Z(SL3xnEB_rS5zV!nD z1TTUs5HaH;%;3b0V3h62;wWJd{05D^Vxot1a^wiK$ix0Z%7QIH)2?gFN6#FyJ9|4+ zMGiu)$lb>!JD<|WDWf0g&4FUu?EUk-*H^o&qz8kOC@#7Ecd;lgafZY1uS*h>O2d}b zOuvJTfp?!T2>i755gM?x@Az!NlF$86pZ7rRzRU2f%zL%BKaWh@r~akpMNdd12GXFf zdLzmwG1`;s7XfT5;T6`A#g9~ea&8@?Cy(SV=nm=*EftG_;On~zIJ0eUX%PHVH-`sDwQ37j?KBrih zXgVh#O*=yHX&?$W7Op=0-v!cLKuU81fp%D_pK2NH!mOhU1@qikyn_kwZZ%c+(0{9=daLd*vZZ|fhu#sl$^l+QvOpsP z+z&2(r3b*kd9%FCj2F9YGgbaZ!T!xNT;q%9*70kx!lR4eVHtbqrc`gV+w{k7p+G#E z4*0G`Ciz&=D{+vR_A@JsmyL~Ue-lt#gtcMaVV)?-?$$@36ciiW=S(LphiT!>JCvndj<1bAPIk|G~-C9H$=H0H05MEy+Q3k?x@#pF?amh zS3?Q2h1Ra#x2$@#0g3~Adw-OQQz=CC{ps;dB}GQNGudQ@@~$apR(g0To3Gdk5Fo7Bg4g4LF_O z|6NS|2ri_4y2;e3_@vb5(nSQj3|JTDB^j2kQ1KAID7ijC8*hnq>7APCT&XIRQ%VMe)1oAmNc^Up!dnmb z#^3adZRbFgFORu1zFumKHs?3Eav!UFRziuM``N+c!O~;FFax>J!)Ymf_a)nsZI#x- z`lUPL^rTo;H6~?gdPDk64S|caY5h5}Yt zcM6{Jgzl+cE{ikamf-e<8porTY&92>&m`{F`PeIo9B>-9_4650$1FM5_Be;c9EH2+ z`vRBBk0;r5E3pw=z+X`b$+w2}W}?%pp{jaluJ4ap_?hnVV8gWzrwe-}he$<_Tix_1 zte@%i2Dz*PNp^c><3~DUo^MJfUj7hWi3y{e7gtJdMb!q!0wm zd-C>P*DWgFPiMXT7%to_U3Z_MFQdaZeS7-Ljb^k_Gr5CWxw5!p?IV*0h0>Z_3(yF) zQ4yh8?&52J@N%0l5IjKDOV+e(W|nu?QiOndNCN0sA=3AKo8v6*+qZ|ze8f=>gAB7v z*Y1I$zW!r`vUH$}!{*G?M^3bPtq>P6#Cg%>YK#57)17aI%`s^{y0Le}K^hl5h8?fs zW^_a;ZWxLbrljUsY9`;&hpz7US6%jF`?z9v5o>j2?#Vxd%nl0lvbH9gIZUuRt9hK= z6m;35Luu|*kE^YbQn3*Go>Owx1-~*43yn$GiZs}Im@f!a=Z%;qzTC3gq7o%81WeJq z^EmDtZ17?Vrsxu;z~9oXku|9z`i$0hwP#wkFda8O=2Il>D>xQhk167AO}XpN_E)=Q zn>(svSZ(#Zdi&w-tC|A`FZP*C#jM3KFK7J?hcm)fR$hm5)K72v(wzz}n0OrY!Z2S` z5waV9DZtP4x4S=?Z|QX-bfo?vUe%hMY1Ewz@nBvdRzR8?&qjZra#*tE@HFjZ#@vo! 
z73-#Y`=Ijq2hNb(r;I|wSqHhnp;mST7(YOR+mJaeWNhr@4yLB!_fOrK2ig-{vLozu z2xOpmiBJq$Nv{vI+Hy9(ggNf~{fL9`(jhCR(VxKt+#dii>-R>uUVlTjJN;;%UUP7pguo@?-EA)mVG&pED~^D9j?^H6ro(`kAJ0H(rCY~de= zql_O%&}EuGWq65hHe=~;dHJ5{I(!AFxyl37luRRvnA$h8TNC?-6_f-#`SaejmbtGf zI}PGhKPh>Xu|2(8T#(cdf>l>C{7XPG!zKlJw7Dab$wuT?#LW z*Gh2-h}`xg^Txh>410OlM?@9Jz#@tuC0bCWDM@zirjF@LJip(*S3%SDSnRem9h~3} z*_~fce>D)JToRuBl)lQ%%wnE$&~JEG)}C>b&tfjwuxWbXqSaNmH=6MDWxCQrvM1{d zsD7^A7Fjc=UqPWBCK;tm|2!fim##Lk-pcUxNGtGvS9hngLvN3@&o{4bidddh(CwhD zg;;2ZoVyWATMTCaZI=TdHL}}#S8KH;Rpa41P8}*a*yf!nVy33##71X0+1M3r-^hE8 z{_ZG(to~a%cJ+O07^U`o)x1ya>u72|kAEDc(9U770CnDWiv-#Bo6`VOzdtcRB%9ZtghWeYcqW13$ z1T+gtW4>^R&JhJE{r955;-cke+KEkp;sH=8nYbW7<9 z)SSpFKZcSdhN{27FOyOCWr;?NJ;()5U~j|Oi<_Lb5%6PBPdx}1fK9;6Qh1m(v(tuM zB3|7lV`R||HY2i;##Q?@e8bWjbqKweWurWk-RH#z{Y1gW}eURJ~hrQw#ty{qw%w(Hn<7pBVhU&cXWdS)PmcX6L+T ziYvX7NHFiO0~r6x&;E-<Lc&E#yd>(t+d<}W;PXsmMM=X-3F1d zTVMlxY?>G{h#hJWnSWO~Xp-hto+j3sIUBD(YEkH5%2JAQA!cv!;s8E|nE^$#fLq@X zhGjvifW$-KP#DS*9ax_m$cP`aquF-CU(Q)y(}Q3FOr6J48e1YbzZXTajkGYw7iz zzei_;ca!bRV+3QVe33xSBpEEd>S$0nJh?)GD z)V!?^Ox$}z^zOuY3F#)pw#>EoGh&2GXE!H;vsIul8)lcRZ}^rhT1wMV1U~5^5~F~~ z6w-munMOVzN--v$7%%F+x5=L^H#UA(blPwkH`Vngl-FjJ!ru(`yjJ}7hm*z~%q6ny zZ`NA-RsEs$E6OUyz4cS+X1e!veno`K-d>L4nUY-_o2kA1o(9Ot1N9{bZV@MJ^`o3_ zW-5iRx@gF`^%IHgJKMR~o@Hbo$dYHllYxl^B5Omr`ud5Dx>}!P=9-(xvlDX- z-nP`+6zU9rDQUOTzkAa_&vAYMp}VRPnyN3y{Q-*ZM?Zm1}ByC%okZJ5^7Kq zIT2XV&ojb70WK-})Z_Nao|V@g3H=E_+9pYKh8o)1Tu+tytbinvQb7Fq`!Bf?)R-;r zmFT}tBPxxrG3Wdd>K?@NUO#ECqt8V6f>qaI-KXKOI08_;qAnSh=1T<`{n2;^9oLsseI^c3qZ5Q!`WFt*>x&Y(d&Vh;B}vg2>e zj&Js87S_v7L~q8rk(w=}57zCt7?jB)$o67Xv?L6+vwzgt^TV7N@^K-Zx?UVj8ps;SZS}vAc@ecw4KO6Bvu&QZx_KWeY-@Z zk_u0)@=&W4I(zdWg4=96{<9pRC$0dd5Z=3P9Onv>t_+!0P=pd>>23ifZ0vVH(drwd z!vltV8l>@kvv$Z(3o5DN~ zVuilI_$`NtuXccFP0u7pst3VLwi{gQ3>eQU8|MZS_IR)ZA#x2>qZdfyvNF96l>}t_ zpH@{Zo3_r%w|;+lU_pe%?crEtn;VbiGQqtIny%=_EcWW1zUU#^<^&50C&&g1TWnf@EW0hF+m( zI%c(-2$*B%Il?<5Fesnd;|0B0Kn22kJ;H4fai8quHSkClpz8n80ZaM3s`$@!fX2C! 
zXADP!CR9u)-bk)xEj81MZg@JzuOKfsF_i%Vi_9;dkS87{$qs;A@~Be#onWA~^ig*ddu>pPk)=zlEcd+r?Y4JdP-EIQDoBbk5XhHRSqJvebhSfZ>bi3?4Ru%`#dm+^vN z2wS!Yt{8sbyMk`~%oT^ijL_L;6h+9o0C6)22)CnMU5X5Vq9l0ow9e>7bH@CDes}8= zAv9gNyA`C6nL{Sz{=#mly~O~~1R%E0)|7fQWN8?H9HigU{uqRT$vNGbQq+oJxw{1F+M8UvxlKC(`b4()AXf9(K%RvAGGj> zl6LiRmJu$=7Q`y=_F`L;jy4P()vY4~$*NlAgEG9|ONEiUsadM>1ezk^&SFm?jFeJ6 z;`th((Fp<9)PKeb?|xQ3f-EYn25^m8JlURN5Zaq445w<)kVAcF38c9VkQC+{9Fji= z0nixP7J*fStr$SP{d7lFBrWzHm%Mf1qdOI=A@>=TmeDEu?5=Dp1d1fq#w4)e0Kuu#}^=#i9;ibf9Kw4`230C z*|2aStQV^2)U@|XR%;*R!>JDf`hk3PL{?dv>A3-{zhwOl?cm1ChK;CVgL zTu7z#KI1+!$x4qyy+}Od9$N%Z%qxmsq6fn!2D1=V{XUAU1hCaFYX|VNX%PaCWV`6o zHI!E^ZI!IzTsaW#y7C1_h}7#oD*7iyJB<$W`n3~c*=?aIBaCt+FR>GM*IETTTo%Ax zFPcUjpV86*wb%+HW4y)^!%8MT_wxq$7Sy|UkJ!Hc zbYnl7Bau3XwL*Bf0Pm&FZvfxLz4%!X*=Xb#EqV&B+YmjtA2d+3LVaYRSgH`33g`DE zV`<;W(R#_<40XdoNix+Lo~H2^D#OpU_v*i7yptB<`56`|Qel_Q03TR)(lCg2Za>Qq zm)nN}_)f}C&OtJZh)?ecW79{>Rb~+v@57O9A+gsiU4#T{o=fBAJ>Ms>yXYks)ufXS zyK^AOIh19SE96i=AuC2NP7w|lu||#)}LV<$uj zu{US+Zp2c_boN||kxgGW&hKqK<6rWkAssogH@0_$Ivk+(!M(hi0)_$Jm%XNXfGFHD zpfXE)KIN*j%OzMM%KM#>zxs;_U)czm=zNEbNz!2zgZgIbNH6gg=Wq!YW4eVhGTg2+ zsxp_FMu>ZpHTKm|Nqg^oCfJNx`k19wRE;s!hJ&O=dQ>d9Tk7)nDSt`_?68^J*3wqb z#iS@^Ig0DJ0=vW^&y2?;PQhfbO)LbAT^wfUYHPj~m;KtD7cYt4LYa%nDK+SkmbQ6o z!{^D38tt*TA^=@bM;~smyq6q-jF?}rFKItfV6cu*AkJ+7@$4t&RMRnOW#;#UXh7o4 z&|OgMD$cWmbpsHl%1}0cB{(+(5_Hu+eFq-R^rT(fSly57+GkH$(9M5Ql-7?c#Q4-; zf7%}yyq47LmQ!r^8(|1l1+5B`dU^3*jU?eoego%2nF~>28)S-R!8y0XMzwgkX1J-z zjl$f%_GypRWUO$p$_^^z_uvAZQh?fIx=BP)KesxY43!zawOa$rlgKO|H$QE*3*ed)SoGdlECS1N@_6hX z*V9+&&uR53$XROk*id96zq(YAJxr#OH*ehL^%l+FNZ=~dyb^2J$ zgvcP#09Q{}re4lvbfWs;%6H_XG-3`#jyJ@)#eGvZJeGzs2d(7R`BLUv#E85#Sh0AV zphWE@1Z>b$(H+Q6=rU29fuh>Cgb{9!)tob*i?o(net>FEEdV!kb}hVdQ;B?x zBfL(FbxKL`S=E64lc#U*`^s_qsg_wSM6mtx&4EsPiF-kb+*w%>LS>v8lTx9n$qZ9C zE*i#@&puFKY~%qv>n?Z-DQ}Dw7~JV|LZ0U=NgN>5F5cE`(yn?Y6oU#u9egPZOGziC+s3_b&#T_v9b#~T=(#XO+vvqn;2)~Ov4Jstx5~8Zww<1 zRxk1VZw==FR3ZGVLbitgxkano_JbM@+>dVZPRk1(QSYM{()n(+k2vkQ_&l!l%ll@a z=5+Lqd$ZaQS$}qUO(i1bElq9F$R@JhEy&U$WGM6NGuyY;=13%g^aM`%Om+rKk_{7c|?LWon#O2jEnSY<4Y9 zv1nTSqp~ldhQ*S9k@dZLNhWMjktJMRzt!cUVirrLiOi&N41U=ZNrL|EpUxq3I~m8< z+dM++(T9IZ2X3ImMY>5zRLI}eeI|j98{dRP_tSm(^6y`P%B;cijo$|eNJN}qb?Hun z%3A+?#s~RNmcC;{H?6?i_-NNz35I@}0W~Nm#r2Oof*;s*zP(m>pA`+(DH&;=BP40{ zQMXMcKJ~P6zEqotaqqp_%lm)60IhdxKHDk}W;Lgd=r@-rXSTk28Boj;BDWDr`-WF8 zXsVyNsqH}nz0IgFZ!**0l${A^7OOkLb{`rvdW>Z9FeCf0ZU|BR&+39Xnes2m|NQ=T zbW;skVwbe<*1}~6^%0HK)6+%17bObFEhYHH-|r@k1i2fkCen=H_rdp2M3zdbTm1QS z?;`>`0*g3fTd|%sjLg|?pUf&DXTo!L|EWYb0l$9n+W%CGUqE&5@Bb6S8^4B{Y3#ZG z9cy3rZ@E|km5?Rq@RjR~&xfT59E!sacloPbjaM5U!&wB+Ist%i#La%LzHA;)#g86; z2ljXi*lxE0JM?{J+iojzab7K>azZ;l@;vj}eZtwu1&vVa}(w|4vwPD$PmE9$fG@${XAzX^y;$rnBDuqUU_sTYAYKM0HGZ; z?3m_}KLz?F7-Z(SIh^rp7f24BvBjVHz-vq08_)Kn-sY2-<+dDlh8SOq3Jf{xnYt-ONw1cvUlBXZ4yFSNE$NXr zaq6^#@+YnKCVRj|)I8{lEY++eu?+NYVGckvwYs=uVrE_hh|!%A z)x2CfokP0e8PM5P>eTIe--|8G!t@g$tDW7pMq@^ku)i3nZpyl#z%@BOaP0}Xtjh5F zobI$ol_)QaZKYMePrG$y^`rBnr|oow-i7u^u{wX}m`zsCZ*>RkZuMW=A^COr!Pm~5 zwniI7oi∈2f<$fUstDGnvKETJ=gmS;~4Qkgq=MhJ#sZV5?(>g{|%1DFP93b9S5agd9M+ z4SS#m3f@g1q|%o@g2TT6)%(W8!5I4GMyO)%$s!*@@&rma8V#kg`gNY?eV=wHjNx+y zNxi1tmZTKS+skf~s-h=aR@gRBN7)H4(hv0Q)p=t@4&?*Dz#$(`5CxUp24mwB@=}q2 zC11#cZ9N6CM4P|_fR}T;IbgkuMY<&XYD(f%x>ZdLFosVxc1FPJs|ikBoR6Q{l}y1zK{QovbCxqrAp3GatL{f4;wBTrf4e1A=pF-1$=^M0Q(1Wx+Rk zZsr42v4>kL*BzkQmJ1-*=qpD^g$MN|zW)(V#QfjGUH0myR2)HI@2qfyA=m2xsX7Sq znK^<;m*=o}7DuMgS~U00`#2$8IWenIa&hmi@Ou#5pJM0(aRH$e$Gb@dj#+>M*csoO zdSUgVh#0|nD=s5FA&1pwZGa5T2Rv$N`S_RMC6_bq5@_pobGC+4MG`8#%a;tktW3TF z*A(qK(|mcUzLAb-CcU#*w7!pp>1?f^AH3ERo&41OjBFd=X4izL3Z!qHfqLrqQIu^U 
zxtVWS03qrLl)$o^cpeJn4LhE$p%C68#{@8glM9maUnT1NLQ&6R!Vpntrht+wma`{J zfuqe(@3=T;aIN{8)z|EdVstCjknLw`E_+~0_v>i2?(v)0BwA#D4B6ICc2hRoAT-JQ z2G&a8SNGAHef3MWs&q4WBtKK&c4FRal;2~gNHh&AUG$^Nyd6Z)e+N_aU@wQDbZsdaO94Wm^#v zV&$;NdHw$E@`xBk0jtq5*%)Xp1#h3j#YH!*HO61r1(jctGlYzCb67IZW6mbwo_SH< z?YxP6h0GIjFmWCH^jLmj&v#S2&1yTmtZ+J$YU7TVjE-&m$2C@+v+Lk?8YBA*RPZJ} zazx#h_W8S|0*m^BV{eMAZk_|i_7dgx*6TCyQ=b>GL1j1ey9yX_tCz6aXvK-LY#NNU zQ!g9N(Hie8VKqd1$-)wY+y%1o_bX|4@}A37IUjJh`wpP;v7B#?Gk^^9VZ4o$yGmgJ z?y$Y14yUgeU@IEG;1~)NWvKM#l9E=pOyqDSEnJ@qVMLW$dKG#$=Qq5vrj|~7PL*!D zG(sB<%Q&*Ei`CN?fmH(sgqMNXG~V_)ro%i4s?<$dy?kirVD zPWvrbQxTO}Dh0x$PvJJ=&Cmi5y>Gdv{>dBj9M&CMfw?O%Ud7k%)z9vtxpt&mUMNxn zUu-by`(xBz z>kg+f{QHs0sBC4+JY;0gly!7a_Bcey496}f2aymWGBdJeW{;eZ-LZ-&J3C|^lKtKf z^?QHsU$6h1>pJJT?)$l)`?Q`eLI%@|LZ}}yRt$P#SI4#$I0?1TEWB4;6Kqd>a_fOTJ&l|bLKy4|Q`2wiy;2nw_&l%8Mc_-ZKynXFiqp=Ios}+e6$7wuW|GbT- z^_xLk+BoFf`!a@(34QCvJSAU%GxltJ=(>jDW>5hYsXlq^RYGi-_gcqCq=Fc)L$o%! z{Cr_X8T-;ftB3YQhpl}vViyZVtrr?ieT;aj_Aj|wiZ>}2q~CP9cG1D42RuC|T z@dA{%nB3w8qAM0C@@GNOZJX+x;;D$Qd>KxZOf7T%&rh|Ho2O@iv?lE@7U7FxOP|Ok z31kuq+V+^TJBb<2af|wAK8ZHF$DqFw`4qGn;iVn)r5__|Q8Yw7n#!ljMQYES>BiF! zh;FnsAALN>x|vOhnE`#F&8T&A&2jLYA4c*>ALsF?$M?-pUT4dG3I=eQ^^Mn@hXit? z)mCjjxR?G4)3p@+xQkOAw^YyHAPf5iXOS*cP+%uqpoZDcG6xZM2c0GVrO4}?8rEbJ z*e+wqxxch)@o|6|#8SJ3oye7l5 z&#%}R2k6~`H8Mvpx9}bTUfey}363N}xadldSc1-#9Ax0|D&ip_{Ru$-qKCW}SO!Qn z*0ZiG(FxW~ms@bGk>iqn`d{NArf53)sBuJ&WR4Q}tq@wH7emYs`T{+UDDFj!e4bW1 z*I2yMn46BTrEaj|j&hkIaOiNNnrG}s)vLdLP~dGu{tvoQwq@9wY30NI3iye1O?Pi1 zmraRx*@V|?glp%+MmbTi^Kp#AyJ7FrCBw)v^~_cj{%EJNUaA)gR8|>kS<*6OLEtym z=Lfej3WEYY&z>k6@ro4DHBL2!=_zutR>fC0S`b{B+Yd0Ol{aZ;FDapb6}%=dobil} z-Xx}QXLM(eDu^?Eh-_b?30qg?|Mob#m)&8q(8{@yy^l#fAmLK05`&7d;m4@(>N@16 zOb%UBtuBrk^p2a8Qfw{gG^sRxNez^God5mjR6>R(jx}c%$APs~V7}xqwpP$62u<= z&cq@Yx&MB6k&vspr%tLQkM1u%@!5q4S1!lR4eVD|32X z_B0U7#l(&p6fqUz0)XXW%&8x7Jnow*#G4Z{dXHw43~RMtsn#x(+kDLnx2z{=z0KGk zQ%1=F*F%-IV#-xspDEx!o>;(O2yXlwr8E|@de8HIZn&rWeC%%~sUG_-*ZX{$?k!Go z*+2^U`|pHku@i~So3gXU?l5l8_^r(0&DUCh zszm6WIl+f!l+4G!vW`|53c9)TM0CwpP4$=z?4(_H^teSg_Tb6YdyYF_7l8*IgBtB$ z54uZ)QB%bXMRDTyT4KfR4fHjTrlEWp-uZ-CREwU!&%bN5zn^3#nQd^R8=V`ZbLIpn zn^8ZdTg&Qd$h)#(;8Z~K2%pccX6mYx-c|if!a_0k_#SNB`414+k#vgNI*{_wwAOIF z^v5@RTq`4U*d4hVt#?YitB)u9yHBQS>&lOm{(RB|XTEouh*s}+02g`o+O*8(#;IWXK+7JUPG*Th{I28XIX15{5n$LQ*t{q|ZMF5^LM%p+|dru^`eW%HEktjOOJGELoyU_KPy6B4CYzZ( zm7(xHD0k%$t*v@8!DPudk>x6;{n-YZ7*r|mTC}^-9?(geHuxUo^R56XWJ^(L8v_cY z@#LL7fCwXyfmV{ZgF)ryZfZD)NCd$MC|~cHC{?O3Z*VTilmpr$v?UAIUvQGZKcDgu z@)ezax<>AM`(_~*e9l3F5igBb`R_5RcLG0KqIH2i+1|t$c!4fTL#n?OK$!6ew>$!L z$~q7khhqhVi%*p8z4Ciqrl1b{f-YsMa2)CrC~cwgbr`PTyuj8PptE(n{k^CD)2uiC zeh*n(Xs<5mJeO5SihVFux6-S&-V?1eeq(>_>HSnjh4nF)<_jnwfbz7~U%PEwjat44 z!i63IrJYH%H}KIiRZ&s#Fgt$^5CP(W1=jq2)na}Dol>mcIcPI5sv#zFAH5xK+*~<6 z)m2EEtX1spi)nyej*Zzk1f>|eom3V?Q;~^ebp(YdRKZ6^-(FB9AvtztE4S^vQ=`L# z`05O9JlZfwVrbvHUJ}^mtwOK`1nBLI=kG%|M+-|t*y)omr(j%>1fdgB;!${R)w}`$QfW_*7{%RQNkBn=-r~s6pYa8KyfYdVD zaHwAi3K|cZRp4y$a~y-ey{fM%V-TG3aQKt#bmfDA&@2Wh%}9D~A6(G~O7`7dI;KDY zVD-SKVj6N@`$IMg{u@|nZz7=1xe`u7-_hv~sy|j92L*8}8Srv}aS@V)j^sO&Vs;CT z!Zm(&*Zulou)vjG zpq;M!hMz>L`j%h|9q(oOVC@Hv@50@>>DD$a0sxdBS1)xyF&-P~QVLybfc zE;ZZw!;CONH-MXl6uaNt@!&M#R<%u638Nb*)?Jx6{9VnktxYbm*byzWk^WLC{RPlP z7r6?d-?qQJqO^ZD9x&g8u=?%CH*TM(42CMe-1X1h;nVbn`)qTGZ$Rp%4`ssELHCWf zSH{L5dIl7+b0edhmKfI#JhmJmWCvKzt*bY#(HsE*9k%ulK>R!a`6N=v`{@M<7pd36 zp?*LTwV!FE3uVvjOw$_P{jv-&5?`xBfE}uEn5~#V4SVAiVDTsSjqg2h`5T#6K>+Zo z%6IclZfjgedI%7ql2n5QqiU_Q3Z$-bGV^9KQeIPj;Ng|Of&LzC)%b)UYRYEV|F@Q{ z#(en9f)*Rl;lB0;5Zzrg8qtmA6i3>FVUm&gGtpG1-}IMU#Xe0E-yl)v%lAR>IuEL4 
zdT77RX{7fly8?rCHNo7f{`3iRXg-O7QaImOssVaeQo$@(ERRlR29O5`ypDMr>fZh9Hp6?;=i4ZydrWHRs6< zf`J-Mob*&b08UkXlzg@4@}C0#NG>;GtA#O;5<+!79#fc<+q?;2^Sv)eARNd_o}6G2 zGN|w(nF0MD@KBf6GZ<7X^VV8WyBBp4KxMmlT814&2m^bPTyTcsWIa&Pif=&LN$ueq zhwG6KLX*h&#}pCi-=jW0Jy!=z_jaex>;%t48BGH;u-*wER?m1~?kj1LI>JxraaXa` z3H_sec$@R1bT?5y2HWi-4Zt1do+pm2zea7BKOdg~^5Z!5$q!>;n%DVC-|Z;wSSOp!H3qXO?yd=2D!H$+k5-rxJDM?41&N4M zX;0Z4K@CI+1m}t~jWvmQfCzFnLKTIyC_dEpxS0;w{`de&Ufe8*27LtHot_X0CWcm5 zmMTI|T2;YzxCTYF%~rjsxFRZWx^q)QhJ$-(u<;cSFN^Gg5o1UtOK}trvr3PgydzSu zO-BUaGoXVe;14gXu`mk- zcS@!op{B!?4wv+!xNL_wldne3Pjm5lO-<|fzQ;o;!Fv~t(imlA3k}=(HM68nEcRGL7?XuQ=y89nli;2hJ-qLAqKZON6N zfH@$td&7L;*Fr~N=H{BQmxk>VYcEIQdP1+cW>z#vYR$r4Cpmz2N(MqQ|EVBPL}GV0 zA}q9=u1>2I=G~gY;Mr1#T`qVf5O zgCtp&t6wi>7EeBBflHN(T(+ia?V5W}Uz8Lmq*xka4-j#S)H9V?55RyQR`oD-HUWOF z^T6}p2_NaN^B{oNz(Gi|cjCkyokI=rgdauslD3*5oD$shGh8JUyeO%-@S$Yf#>Hib zCu7`+=|k}BnDc{oaBj1e2IsZ8$7nx^Om(v8rVl{wcuiIaN)s}hQ|99!S3oB0*;w@$ z+tS>XlCkDB{yD}h^E7?N>lHoY?Cao~R*`vsSSR2d{kfLYVnR;)W1k@#AAhl#geUpO z#G^aU!G8FT9HtZ~zSur0PGBp7sM!qba4Ko+O#|!hkS?H~LTAtlME{m39k+VZCW-m2 zLK#1EWn;C|h|E8(chTSh0c{!7YpeJ+vgRm$5cmkZ}1E5vDXv$U;8C-WHtFras9WcOB2%NmGeuDMsfyxjjbl5`o_T6)tOu zTm{%=xF2k0O@(W{ErZP!e&5P*sj*ou0Yeg{MpN~f6NO$=QzgiF*{m)3O}b$NceOl7 zZdp(KNJF(brZ#p29ARZOMyXr*SrH;8)Pc_~#ZjnSlomWw- zf7Yj&3Dgqm;OS`z+Qx`b0qSl0{+u02pUEfB6z_X58xALea9fX>lEO~-K34n1Pp1+k z1aEO8`nGHW--bsj@8WweiE+`2 z-E+m?2@68AZethlnHGYs9b|}sPFaBl3$yXoONVFS9gair$=a_0c209IQq>PFyOi3S z$CcX{znAc_tv}(%b`iC{2<+^{D6R$XO>Liz_k(?_BYy|Tmx{iw}bt$-nP>EC7Zh0-oOG>II6tBdl{N6+Sd@S1&_8L?sG8Y7kC zHa^)M2G=m^WtJI4zCW%dKVK#mGidF$<(M72T%WGMXf)iq|KV9Avpl8-aCADSy)f zmtvzU62GN7l2K*Tlek37yLuPF`{txbS|oZcI6f4QL<;k4m4#*BJZ|$pM*QD4e=Fy^ zhiy{3#UA*PIk$+0RI*#(XJEXMCqJV=T=8qTUEI`7e15Lm6YqP}t#8m({P@m& zttoxAx_kiu1bl%HzmEHErDQytcfvmaReD%shRFq&ezV$+?i8s${hs7LnBF7eQ&3?~ z!@DYGpMO%%`(sIBgsQL({Y%`E_$cZjwE}3G1bvQWyl~2 zAe|76=_%FCPqd0(#O*RkH(k2Xmxv2P|1lF3fnEh1$^kZeGEryPfu4n}gzuks(*Mt} z@!#Nc#e+7z-VUGPU&AG3HfzFJ*a|qJ=G-j|J&7R(^#_c`-)(&U02sYo%E$cYitH~% z2yV^!3FA=$z6aVw%ke*Bs*XUIjf+wlu`{|-tm@sx}hBDzoDg-^~jXizJ^ cPn0_%$7XJ8z^&xePk}##${pomMe~6F0S;ExX8-^I diff --git a/docs/installation.md b/docs/installation.md index 74b9ddf8..77f9178c 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -41,7 +41,7 @@ PLUGINS_CONFIG = { ``` -## Plugin Settings +## Plugin Configuration The plugin behavior can be controlled with the following list of settings. diff --git a/docs/navigating-backup.md b/docs/navigating-backup.md index 2571bbce..06e31a36 100644 --- a/docs/navigating-backup.md +++ b/docs/navigating-backup.md @@ -21,65 +21,10 @@ uses cases, the following settings are available and further documented below. ### Backup Repositories -In the `Backup Repositories` field of the UI, configure all of the repositories which you intend to use for backed-up device configurations as part of Golden Config. +In the `Backup Repository` field of the Settings, configure the repository which you intend to use for backed-up device configurations as part of Golden Config. Backup repositories must first be configured under **Extensibility -> Git Repositories**. When you configure a repository, look for the `Provides` field in the UI. To serve as a configuration backup store, the repository must be configured with the `backup configs` capability under the `Provides` field. For further details, refer to [Navigating Nautobot Git Settings](./navigating-golden.md#git-settings). -### Backup Repository Matching Rule - -.. Note:: - Only use a Backup Repository Matching Rule if you have **more than one** backup repository. 
It is not needed if you only have one, and will cause backup failures for any devices which do not match the rule. The setting is mandatory if you have more than one repository. - -The `backup_match_rule` setting allows you to match a given `Device` Django ORM object to a backup Git repository. This field should contain a Jinja2-formatted template. The plugin populates the variables in the Jinja2 template via the GraphQL query configured on the plugin. - -Say that in your environment you have three regions in which your devices reside: North America, Asia Pacific, and Africa. You have populated these values as `Region` objects in Nautobot, and assigned a `Region` value to each of your devices. You want your backup solution to scale well, so you have a dedicated backup Git repository for each region. Every Nautobot object has a `slug` (URL compatible) name in addition to its human-friendly name; our regions' slugs are `north-america`, `asia-pacific`, and `africa`. To configure the plugin to match devices to the desired Git repository, you must first configure the GraphQL query; a _VERY_ simple one might look like this: -``` -query ($device_id: ID!) { - device(id: $device_id) { - config_context - hostname: name - platform { - manufacturer { - name - } - name - napalm_driver - slug - } - primary_ip4 { - address - interface { - name - } - id - } - site { - name - region { - name - slug - } - slug - } - } -} -``` - -The query will look at the `Device` ORM object, and return the values from the query as keys under the top-level `obj` key. The `obj` key represents the Device object. With this GraphQL query, we can make a Jinja2 template to translate the returned values into a string. For example, say that you have a device which is in your Sydney, AU office, which is in the `asia-pacific` region in Nautobot. If you made a Jinja2 template based on that, which looked like this: - -``` -{{obj.site.region.slug}} -``` -Then the template would be rendered to the string: -``` -asia-pacific -``` - -When you create backup repositories, pay attention to your naming scheme. You should name each repository in a way that matches the value of whatever parameter from the Device object which you wish to use to sort devices into repositories. So, for our Sydney device above, it would work to name your Asia Pacific repository something "Asia Pacific Device Backups". This would give it a `slug` value of `asia-pacific-device-backups`, and you could use this in a backup repository matching rule with a template like this: - -``` -{{obj.site.region.slug}}-device-backups -``` ### Backup Path Template diff --git a/docs/navigating-golden.md b/docs/navigating-golden.md index 3a4258d1..bff8621e 100644 --- a/docs/navigating-golden.md +++ b/docs/navigating-golden.md @@ -36,19 +36,24 @@ Each Job attempts to provide sane error handling, and respects the `debug` flag ## Application Settings -The golden configuration plugin settings can be found by navigating to `Plugins -> Settings` button. Under the `Golden Configuration` section. +The golden configuration plugin settings can be found by navigating to `Plugins -> Settings` button. Select one of the Settings, under the `Golden Configuration` section. +Since Golden Configuration Plugin version 1.0, the plugin allows for multiple settings to be configured by the User. +Each of the settings, has the individual repositories and configuration details, as well as the scope. +You could use a combination of settings to customize Your Configuration Compliance behaviour. 
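As an illustrative sketch only (not part of this patch), two settings objects could be scoped so that a higher-weight, region-specific object overrides a catch-all default. The field names below follow the new model fields introduced in this change; the empty-scope-matches-everything behaviour and the `region`/`emea` filter values are assumptions made for the example:

```python
from nautobot_golden_config.models import GoldenConfigSetting

# Catch-all settings: lowest priority; an empty scope is assumed to match all devices.
GoldenConfigSetting.objects.create(
    name="Global Defaults",
    slug="global-defaults",
    weight=1000,
    scope={},
)

# Region-specific settings: higher weight, so it is expected to win for any
# device that also matches the catch-all scope above.
GoldenConfigSetting.objects.create(
    name="EMEA Overrides",
    slug="emea-overrides",
    weight=2000,
    scope={"region": ["emea"]},  # hypothetical region slug
)
```

A device in the hypothetical `emea` region would then be governed by "EMEA Overrides", for the weight-based reasons described next.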
+Settings have a name and a weight. The weight parameter indicates the priority of given Settings - the higher the weight, the device matching the scope defined will be assigned to the scope. +At the same moment, each device will be matched up to maximum of only one `Settings.` In case of the same weight, the sorting is performed by the name. ![Navigate to Settings](./img/navigate-compliance-rules.png) -To configure or update the settings click the pencil icon to edit. +To create new settings click on the `+Add` button. +To update existing settings click on one of the `Settings` name. + |Setting|Explanation| |:--|:--| -|Backup Repositories |One or more Git repositories where your backup configurations will be found. | -|Backup Repository Matching Rule |A Jinja template to match a device to a backup repositories `slug` value. Required if you configure more than one backup repository. E.g. `my-backup-repo-{{obj.site.region.slug}}` | +|Backup Repositories |The Git Repository where your backup configurations will be found. | |Backup Path|A Jinja template which defines the path and name of backup files within the backup repository. The variable `obj` is available as the device instance object of a given device, as is the case for all Jinja templates. e.g. `{{obj.site.slug}}/{{obj.name}}.cfg`| -|Intended Repositories |One or more Git repository where your intended configuration state files will be found. | -|Intended Repository Matching Rule |A Jinja template to match a device to an intended state repository's `slug` value. Required if you configure more than one intended repository. E.g. `best-of-intentions-repo-{{obj.site.region.slug}}` | +|Intended Repositories |The Git Repository where your intended configuration state files will be found. | |Intended Path|A Jinja template which defines the path and name of intended configuration state files within the intended state repository. e.g. `{{obj.site.slug}}/{{obj.name}}.intended_cfg`| |Jinja Repository |The Git Repository where your jinja templates will be found. | |Jinja Path|A Jinja template which defines the path (within the repository) and name of the Jinja template file. e.g. `{{obj.platform.slug}}/{{obj.role.slug}}/main.j2`| diff --git a/docs/navigating-intended.md b/docs/navigating-intended.md index bade01a1..7a02790b 100644 --- a/docs/navigating-intended.md +++ b/docs/navigating-intended.md @@ -2,7 +2,7 @@ ## Configuration Generation -The Golden Config plugin **Intended Configuration** job generates intended state files for each device in the plugin's configured scope. An intended state file contains the output from rendering the device's Source of Truth Aggregation values through the Jinja templates used by the plugin. +The Golden Config plugin **Intended Configuration** job generates intended state files for each device in the plugin setting's configured scope. An intended state file contains the output from rendering the device's Source of Truth Aggregation values through the Jinja templates used by the plugin. The job itself is a Nornir play which uses a single Jinja template per device. Source of Truth Aggregation data comes from the GraphQL query configured in the Golden Config plugin's settings. An important component of the SoT Aggregation data are the `config_context` values. `config_context` should contain a vendor-neutral, JSON structured representation of a device's configuration values: a list of NTP/AAA/Syslog servers, common VRFs, etc. 
See [Config Contexts](https://nautobot.readthedocs.io/en/latest/additional-features/config-contexts/#configuration-contexts) for more information. diff --git a/docs/quick-start.md b/docs/quick-start.md index c2dc1776..11b2e735 100644 --- a/docs/quick-start.md +++ b/docs/quick-start.md @@ -17,13 +17,14 @@ Follow the steps below to get up and running for the configuration backup elemen 3. Make sure to select the **Provides** called `backup configs`. 4. Click Create. -3. Next, make sure to update the Plugins **Settings** with the backup details. +3. Next, make sure to create new or update existing Plugins **Settings** with the backup details. 1. Navigate to `Plugins -> Settings` under the Golden Configuration Section. - 2. Fill out the Backup Repository. (The dropdown will show the repository that was just created.) - 3. Fill out Backup Path Template. Typically `{{obj.site.slug}}/{{obj.name}}.cfg`, see [Setting Details](./navigating-golden.md#application-settings) - 4. Select whether or not to do a connectivity check per device. - 5. Click Save. + 2. Create new or select one of the existing `Settings` objects + 3. Fill out the Backup Repository. (The dropdown will show the repository that was just created.) + 4. Fill out Backup Path Template. Typically `{{obj.site.slug}}/{{obj.name}}.cfg`, see [Setting Details](./navigating-golden.md#application-settings) + 5. Select whether or not to do a connectivity check per device. + 6. Click Save. 4. Create Configuration Removals and Replacements. @@ -61,13 +62,14 @@ Follow the steps below to get up and running for the intended configuration elem 3. Make sure to select the **Provides** called `jinja templates`. 4. Click Create. -4. Next, make sure to update the Plugins **Settings** with the intended and jinja2 template details. +4. Next, make sure to create new or update existing Plugins **Settings** with the intended and jinja2 template details. 1. Navigate to `Plugins -> Settings` under the Golden Configuration Section. - 2. Fill out the Intended Repository. (The dropdown will show the repository that was just created.) - 3. Fill out Intended Path Template. Typically `{{obj.site.slug}}/{{obj.name}}.cfg`, see [Setting Details](./navigating-golden.md#application-settings) - 4. Fill out Jinja Repository. (The dropdown will show the repository that was just created.) - 5. Fill out Jinja Path Template. Typically `{{obj.platform.slug}}.j2`. + 2. Create new or select one of the existing `Settings` objects + 3. Fill out the Intended Repository. (The dropdown will show the repository that was just created.) + 4. Fill out Intended Path Template. Typically `{{obj.site.slug}}/{{obj.name}}.cfg`, see [Setting Details](./navigating-golden.md#application-settings) + 5. Fill out Jinja Repository. (The dropdown will show the repository that was just created.) + 6. Fill out Jinja Path Template. Typically `{{obj.platform.slug}}.j2`. 5. Determine what data(variables) the Jinja2 templates need from Nautobot. diff --git a/docs/upgrade.md b/docs/upgrade.md new file mode 100644 index 00000000..c01f9f3c --- /dev/null +++ b/docs/upgrade.md @@ -0,0 +1,10 @@ +# Upgrade + +When a new release comes out it may be necessary to run a migration of the database to account for any changes in the data models used by this plugin. Execute the command nautobot-server migrate from the Nautobot install nautobot/ directory after updating the package. + +## v1.0.0 +v1.0.0 Provides a breaking change for the users running pre 1.0.0 code sourced from `develop` branch of the plugin. 
Only users of the `Backup Repository Matching Rule` and `Intended Repository Matching Rule` features are affected by following behaviour: migration script will only migrate the first repository from the list into the new default settings. + +Because of this specific behaviour, please review your configuration and capture it before attempting to upgrade if using above features. + +Users running the released packages are not affected by this behaviour. diff --git a/nautobot_golden_config/api/serializers.py b/nautobot_golden_config/api/serializers.py index a92c6364..0546fc67 100644 --- a/nautobot_golden_config/api/serializers.py +++ b/nautobot_golden_config/api/serializers.py @@ -75,33 +75,6 @@ class Meta: model = models.GoldenConfigSetting fields = "__all__" - def validate(self, data): - """Verify that the values in the GoldenConfigSetting API call make sense.""" - validation_error_list = [] - - if len(data["backup_repository"]) == 1 and data["backup_match_rule"]: - validation_error_list.append( - "If you configure only one backup repository, do not enter the backup repository matching rule template." - ) - elif len(data["backup_repository"]) > 1 and not data["backup_match_rule"]: - validation_error_list.append( - "If you specify more than one backup repository, you must provide the backup repository matching rule template." - ) - - if len(data["intended_repository"]) == 1 and data["intended_match_rule"]: - validation_error_list.append( - "If you configure only one intended repository, do not enter the intended repository matching rule template." - ) - elif len(data["intended_repository"]) > 1 and not data["intended_match_rule"]: - validation_error_list.append( - "If you specify more than one intended repository, you must provide the intended repository matching rule template." 
- ) - - if validation_error_list: - raise serializers.ValidationError(validation_error_list) - - return data - class ConfigRemoveSerializer(TaggedObjectSerializer, CustomFieldModelSerializer): """Serializer for ConfigRemove object.""" diff --git a/nautobot_golden_config/api/views.py b/nautobot_golden_config/api/views.py index 1da1d40f..6604c3ac 100644 --- a/nautobot_golden_config/api/views.py +++ b/nautobot_golden_config/api/views.py @@ -12,6 +12,7 @@ from nautobot_golden_config import models from nautobot_golden_config import filters from nautobot_golden_config.utilities.graphql import graph_ql_query +from nautobot_golden_config.utilities.helper import get_device_to_settings_map class GoldenConfigRootView(APIRootView): @@ -30,8 +31,8 @@ class SOTAggDeviceDetailView(APIView): def get(self, request, *args, **kwargs): """Get method serialize for a dictionary to json response.""" device = Device.objects.get(pk=kwargs["pk"]) - global_settings = models.GoldenConfigSetting.objects.first() - status_code, data = graph_ql_query(request, device, global_settings.sot_agg_query) + settings = get_device_to_settings_map(queryset=Device.objects.filter(pk=device.pk))[device] + status_code, data = graph_ql_query(request, device, settings.sot_agg_query) data = json.loads(json.dumps(data)) return Response(serializers.GraphQLSerializer(data=data).initial_data, status=status_code) diff --git a/nautobot_golden_config/forms.py b/nautobot_golden_config/forms.py index 90b4feb5..11088cb7 100644 --- a/nautobot_golden_config/forms.py +++ b/nautobot_golden_config/forms.py @@ -6,13 +6,11 @@ import nautobot.utilities.forms as utilities_forms from nautobot.dcim.models import Device, Platform, Region, Site, DeviceRole, DeviceType, Manufacturer, Rack, RackGroup from nautobot.extras.models import Status -from nautobot.extras.models import GitRepository from nautobot.tenancy.models import Tenant, TenantGroup -from nautobot.utilities.forms import StaticSelect2Multiple, SlugField +from nautobot.utilities.forms import SlugField from nautobot_golden_config import models -from nautobot_golden_config.utilities.helper import clean_config_settings # ConfigCompliance @@ -336,25 +334,20 @@ class GoldenConfigSettingFeatureForm( ): """Filter Form for GoldenConfigSettingFeatureForm instances.""" - backup_repository = forms.ModelMultipleChoiceField( - queryset=GitRepository.objects.filter(provided_contents__contains="nautobot_golden_config.backupconfigs"), - widget=StaticSelect2Multiple(), - ) - intended_repository = forms.ModelMultipleChoiceField( - queryset=GitRepository.objects.filter(provided_contents__contains="nautobot_golden_config.intendedconfigs"), - widget=StaticSelect2Multiple(), - ) + slug = SlugField() class Meta: """Filter Form Meta Data for GoldenConfigSettingFeatureForm instances.""" model = models.GoldenConfigSetting fields = ( + "name", + "slug", + "weight", + "description", "backup_repository", - "backup_match_rule", "backup_path_template", "intended_repository", - "intended_match_rule", "intended_path_template", "jinja_repository", "jinja_path_template", @@ -362,15 +355,3 @@ class Meta: "scope", "sot_agg_query", ) - - def clean(self): - """Clean.""" - super().clean() - # This custom clean function validates logic of when or when not to - # have a template matching path in GlobalConfigSettings for repos. 
- for repo_type in ["intended", "backup"]: - clean_config_settings( - repo_type=repo_type, - repo_count=self.cleaned_data.get(f"{repo_type}_repository").count(), - match_rule=self.cleaned_data.get(f"{repo_type}_match_rule"), - ) diff --git a/nautobot_golden_config/jobs.py b/nautobot_golden_config/jobs.py index 5c14de63..c291d341 100644 --- a/nautobot_golden_config/jobs.py +++ b/nautobot_golden_config/jobs.py @@ -22,18 +22,19 @@ name = "Golden Configuration" # pylint: disable=invalid-name -def git_wrapper(obj, repository_record, git_type): +def get_refreshed_repos(job_obj, repo_type): """Small wrapper to pull latest branch, and return a GitRepo plugin specific object.""" - if not repository_record: - obj.log_failure( - obj, - f"FATAL ERROR: There is not a valid Git repositories for Git type {git_type}, please see pre-requisite instructions to configure an appropriate Git repositories.", - ) - raise # pylint: disable=misplaced-bare-raise + repository_records = set( + getattr(gcs, repo_type) for gcs in GoldenConfigSetting.objects.all() if getattr(gcs, repo_type, None) + ) - ensure_git_repository(repository_record, obj.job_result) - git_repo = GitRepo(repository_record) - return git_repo + repositories = [] + for repository_record in repository_records: + ensure_git_repository(repository_record, job_obj.job_result) + git_repo = GitRepo(repository_record) + repositories.append(git_repo) + + return repositories def commit_check(method): @@ -96,11 +97,8 @@ def run(self, data, commit): # pylint: disable=too-many-branches """Run config compliance report script.""" # pylint: disable=unused-argument - _ = [ - git_wrapper(self, repo, "intended") - for repo in GoldenConfigSetting.objects.first().intended_repository.all() - ] - _ = [git_wrapper(self, repo, "backup") for repo in GoldenConfigSetting.objects.first().backup_repository.all()] + get_refreshed_repos(job_obj=self, repo_type="intended_repository") + get_refreshed_repos(job_obj=self, repo_type="backup_repository") config_compliance(self, data) @@ -133,16 +131,15 @@ def run(self, data, commit): """Run config generation script.""" now = datetime.now() - LOGGER.debug("Pull Jinja template repo.") - jinja_repo = git_wrapper(self, GoldenConfigSetting.objects.first().jinja_repository, "jinja") + LOGGER.debug("Pull Jinja template repos.") + get_refreshed_repos(job_obj=self, repo_type="jinja_repository") - LOGGER.debug("Pull Intended config repo.") - golden_config = GoldenConfigSetting.objects.first() + LOGGER.debug("Pull Intended config repos.") # Instantiate a GitRepo object for each GitRepository in GoldenConfigSettings. - intended_repos = [git_wrapper(self, repo, "intended") for repo in golden_config.intended_repository.all()] + intended_repos = get_refreshed_repos(job_obj=self, repo_type="intended_repository") LOGGER.debug("Run config intended nornir play.") - config_intended(self, data, jinja_repo.path) + config_intended(self, data) # Commit / Push each repo after job is completed. for intended_repo in intended_repos: @@ -179,10 +176,10 @@ def run(self, data, commit): """Run config backup process.""" now = datetime.now() LOGGER.debug("Pull Backup config repo.") - golden_settings = GoldenConfigSetting.objects.first() # Instantiate a GitRepo object for each GitRepository in GoldenConfigSettings. 
- backup_repos = [git_wrapper(self, repo, "backup") for repo in golden_settings.backup_repository.all()] + backup_repos = get_refreshed_repos(job_obj=self, repo_type="backup_repository") + LOGGER.debug("Starting backup jobs to the following repos: %s", backup_repos) LOGGER.debug("Run nornir play.") diff --git a/nautobot_golden_config/migrations/0009_multiple_gc_settings.py b/nautobot_golden_config/migrations/0009_multiple_gc_settings.py new file mode 100644 index 00000000..703371a9 --- /dev/null +++ b/nautobot_golden_config/migrations/0009_multiple_gc_settings.py @@ -0,0 +1,123 @@ +# Generated by Django 3.1.14 on 2022-02-04 09:52 + +from django.db import migrations, models +import django.db.models.deletion + + +def convert_many_repos_part1(apps, schema_editor): + """ + Add the current `backup_repository` and `intended_repository` objects values + to the `FK` additional intermediary attritbute to retain data.` + """ + GoldenConfigSetting = apps.get_model("nautobot_golden_config", "GoldenConfigSetting") + + settings_obj = GoldenConfigSetting.objects.first() + + if settings_obj.backup_repository: + settings_obj.backup_repository_tmp = settings_obj.backup_repository.first() + settings_obj.save() + + if settings_obj.intended_repository: + settings_obj.intended_repository_tmp = settings_obj.intended_repository.first() + settings_obj.save() + + +def convert_many_repos_part2(apps, schema_editor): + """ + Add the current `backup_repository_tmp` and `intended_repository_tmp` object values + to the FKs final attributes to retain data.` + """ + GoldenConfigSetting = apps.get_model("nautobot_golden_config", "GoldenConfigSetting") + + settings_obj = GoldenConfigSetting.objects.first() + + if settings_obj.backup_repository_tmp: + settings_obj.backup_repository = settings_obj.backup_repository_tmp + settings_obj.save() + + if settings_obj.intended_repository_tmp: + settings_obj.intended_repository = settings_obj.intended_repository_tmp + settings_obj.save() + + +class Migration(migrations.Migration): + + dependencies = [ + ('extras', '0018_joblog_data_migration'), + ('nautobot_golden_config', '0008_multi_repo_support_final'), + ] + + operations = [ + migrations.AlterModelOptions( + name='goldenconfigsetting', + options={'ordering': ['-weight', 'name'], 'verbose_name': 'Golden Config Setting'}, + ), + migrations.RemoveField( + model_name='goldenconfigsetting', + name='backup_match_rule', + ), + migrations.RemoveField( + model_name='goldenconfigsetting', + name='intended_match_rule', + ), + migrations.AddField( + model_name='goldenconfigsetting', + name='description', + field=models.CharField(blank=True, max_length=200), + ), + migrations.AddField( + model_name='goldenconfigsetting', + name='name', + field=models.CharField(default='Default Settings', max_length=100, unique=True), + preserve_default=False, + ), + migrations.AddField( + model_name='goldenconfigsetting', + name='slug', + field=models.SlugField(default='default', max_length=100, unique=True), + preserve_default=False, + ), + migrations.AddField( + model_name='goldenconfigsetting', + name='weight', + field=models.PositiveSmallIntegerField(default=1000), + ), + migrations.AddField( + model_name='goldenconfigsetting', + name='backup_repository_tmp', + field=models.ForeignKey(blank=True, limit_choices_to={'provided_contents__contains': 'nautobot_golden_config.backupconfigs'}, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='backup_repository', to='extras.gitrepository'), + ), + migrations.AddField( + 
model_name='goldenconfigsetting', + name='intended_repository_tmp', + field=models.ForeignKey(blank=True, limit_choices_to={'provided_contents__contains': 'nautobot_golden_config.intendedconfigs'}, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='intended_repository', to='extras.gitrepository'), + ), + migrations.RunPython(convert_many_repos_part1), + migrations.RemoveField( + model_name='goldenconfigsetting', + name='backup_repository', + ), + migrations.RemoveField( + model_name='goldenconfigsetting', + name='intended_repository', + ), + migrations.AddField( + model_name='goldenconfigsetting', + name='backup_repository', + field=models.ForeignKey(blank=True, limit_choices_to={'provided_contents__contains': 'nautobot_golden_config.backupconfigs'}, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='backup_repository', to='extras.gitrepository'), + ), + migrations.AddField( + model_name='goldenconfigsetting', + name='intended_repository', + field=models.ForeignKey(blank=True, limit_choices_to={'provided_contents__contains': 'nautobot_golden_config.intendedconfigs'}, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='intended_repository', to='extras.gitrepository'), + ), + migrations.RunPython(convert_many_repos_part2), + migrations.RemoveField( + model_name='goldenconfigsetting', + name='backup_repository_tmp', + ), + migrations.RemoveField( + model_name='goldenconfigsetting', + name='intended_repository_tmp', + ), + ] diff --git a/nautobot_golden_config/models.py b/nautobot_golden_config/models.py index 2b3a8a36..dd39978d 100644 --- a/nautobot_golden_config/models.py +++ b/nautobot_golden_config/models.py @@ -416,19 +416,21 @@ def __str__(self): class GoldenConfigSetting(PrimaryModel): """GoldenConfigSetting Model defintion. This provides global configs instead of via configs.py.""" - backup_repository = models.ManyToManyField( + name = models.CharField(max_length=100, unique=True, blank=False) + slug = models.SlugField(max_length=100, unique=True, blank=False) + weight = models.PositiveSmallIntegerField(default=1000, blank=False) + description = models.CharField( + max_length=200, + blank=True, + ) + backup_repository = models.ForeignKey( to="extras.GitRepository", + on_delete=models.SET_NULL, + null=True, blank=True, related_name="backup_repository", limit_choices_to={"provided_contents__contains": "nautobot_golden_config.backupconfigs"}, ) - backup_match_rule = models.CharField( - max_length=255, - null=False, - blank=True, - verbose_name="Rule to match a device to a Backup Repository.", - help_text="The Jinja path representation of a Backup Repository slug. The variable `obj` is available as the device instance object of a given device, as is the case for all Jinja templates. e.g. `backup-{{obj.site.region.slug}}`", - ) backup_path_template = models.CharField( max_length=255, null=False, @@ -436,19 +438,14 @@ class GoldenConfigSetting(PrimaryModel): verbose_name="Backup Path in Jinja Template Form", help_text="The Jinja path representation of where the backup file will be found. The variable `obj` is available as the device instance object of a given device, as is the case for all Jinja templates. e.g. 
`{{obj.site.slug}}/{{obj.name}}.cfg`", ) - intended_repository = models.ManyToManyField( + intended_repository = models.ForeignKey( to="extras.GitRepository", + on_delete=models.SET_NULL, + null=True, blank=True, related_name="intended_repository", limit_choices_to={"provided_contents__contains": "nautobot_golden_config.intendedconfigs"}, ) - intended_match_rule = models.CharField( - max_length=255, - null=False, - blank=True, - verbose_name="Rule to match a device to an Intended Repository.", - help_text="The Jinja path representation of a Intended Repository slug. The variable `obj` is available as the device instance object of a given device, as is the case for all Jinja templates. e.g. `intended-{{obj.site.region.slug}}`", - ) intended_path_template = models.CharField( max_length=255, null=False, @@ -492,29 +489,25 @@ class GoldenConfigSetting(PrimaryModel): def get_absolute_url(self): # pylint: disable=no-self-use """Return absolute URL for instance.""" - return reverse("plugins:nautobot_golden_config:goldenconfigsetting") + return reverse("plugins:nautobot_golden_config:goldenconfigsetting", args=[self.slug]) def __str__(self): """Return a simple string if model is called.""" - return "Configuration Object" - - def delete(self, *args, **kwargs): - """Enforce the singleton pattern, there is no way to delete the configurations.""" + return f"Golden Config Setting - {self.name}" class Meta: - """Set unique fields for model.""" + """Set unique fields for model. - verbose_name = "Golden Config Setting" + Provide ordering used in tables and get_device_to_settings_map. + Sorting on weight is performed from the highest weight value to the lowest weight value. + This is to ensure only one plugin settings could be applied per single device based on priority and name. + """ - @classmethod - def load(cls): - """Enforce the singleton pattern, fail it somehow more than one instance.""" - if len(cls.objects.all()) != 1: - raise ValidationError("There was an error where more than one instance existed for a setting.") - return cls.objects.first() + verbose_name = "Golden Config Setting" + ordering = ["-weight", "name"] # Refer to weight comment in class docstring. 
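# --- Editorial sketch, not part of this patch ---
# With the ordering above ("-weight", then "name"), the intent is that the
# highest-weight GoldenConfigSetting whose scope matches a device is the single
# settings object applied to it. Something along these lines is assumed to back
# the `get_device_to_settings_map()` helper used throughout this change set;
# the `get_queryset()` scope-matching call is an assumption, not shown in this diff.
def _settings_for_device(device):
    """Return the first (highest-weight) settings object whose scope matches the device."""
    for gc_settings in GoldenConfigSetting.objects.all():  # already ordered by -weight, then name
        if gc_settings.get_queryset().filter(pk=device.pk).exists():
            return gc_settings
    return None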
def clean(self): - """Validate there is only one model and if there is a GraphQL query, that it is valid.""" + """Validate the scope and GraphQL query.""" super().clean() if self.sot_agg_query: diff --git a/nautobot_golden_config/navigation.py b/nautobot_golden_config/navigation.py index 7591b172..5f668fc9 100644 --- a/nautobot_golden_config/navigation.py +++ b/nautobot_golden_config/navigation.py @@ -90,16 +90,16 @@ plugin_items.append( PluginMenuItem( - link="plugins:nautobot_golden_config:goldenconfigsetting", + link="plugins:nautobot_golden_config:goldenconfigsetting_list", link_text="Settings", - permissions=["nautobot_golden_config.view_compliancereplace"], + permissions=["nautobot_golden_config.view_goldenconfigsetting"], buttons=( PluginMenuButton( - link="plugins:nautobot_golden_config:goldenconfigsetting_edit", - title="Golden Config Settings", - icon_class="mdi mdi-pencil", - color=ButtonColorChoices.YELLOW, - permissions=["nautobot_golden_config.edit_goldenconfigsetting"], + link="plugins:nautobot_golden_config:goldenconfigsetting_add", + title="Add", + icon_class="mdi mdi-plus-thick", + color=ButtonColorChoices.GREEN, + permissions=["nautobot_golden_config.change_goldenconfigsetting"], ), ), ), diff --git a/nautobot_golden_config/nornir_plays/config_backup.py b/nautobot_golden_config/nornir_plays/config_backup.py index 8cd9b8f8..6e73450d 100644 --- a/nautobot_golden_config/nornir_plays/config_backup.py +++ b/nautobot_golden_config/nornir_plays/config_backup.py @@ -16,13 +16,12 @@ from nautobot_golden_config.utilities.helper import ( + get_device_to_settings_map, get_job_filter, - get_repository_working_dir, - verify_global_settings, + verify_settings, render_jinja_template, ) from nautobot_golden_config.models import ( - GoldenConfigSetting, GoldenConfig, ConfigRemove, ConfigReplace, @@ -33,7 +32,7 @@ def run_backup( # pylint: disable=too-many-arguments - task: Task, logger, global_settings, remove_regex_dict, replace_regex_dict + task: Task, logger, device_to_settings_map, remove_regex_dict, replace_regex_dict ) -> Result: r"""Backup configurations to disk. 
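# --- Editorial note, not part of this patch ---
# The `device_to_settings_map` argument introduced in the hunk above is read
# below as `device_to_settings_map[obj.id]`, so it is assumed to be a plain
# dict keyed by Device primary key, built once per job from the job's device
# queryset and shared by the backup, compliance, and intended plays, e.g.:
#
#     device_to_settings_map = get_device_to_settings_map(queryset=get_job_filter(data))
#     # -> {device.id: <highest-weight GoldenConfigSetting whose scope matches the device (assumed)>, ...}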
@@ -46,6 +45,7 @@ def run_backup( # pylint: disable=too-many-arguments result (Result): Result from Nornir task """ obj = task.host.data["obj"] + settings = device_to_settings_map[obj.id] backup_obj = GoldenConfig.objects.filter(device=obj).first() if not backup_obj: @@ -55,11 +55,11 @@ def run_backup( # pylint: disable=too-many-arguments backup_obj.backup_last_attempt_date = task.host.defaults.data["now"] backup_obj.save() - backup_directory = get_repository_working_dir("backup", obj, logger, global_settings) - backup_path_template_obj = render_jinja_template(obj, logger, global_settings.backup_path_template) + backup_directory = settings.backup_repository.filesystem_path + backup_path_template_obj = render_jinja_template(obj, logger, settings.backup_path_template) backup_file = os.path.join(backup_directory, backup_path_template_obj) - if global_settings.backup_test_connectivity is not False: + if settings.backup_test_connectivity is not False: task.run( task=dispatcher, name="TEST CONNECTIVITY", @@ -93,8 +93,12 @@ def config_backup(job_result, data): """Nornir play to backup configurations.""" now = datetime.now() logger = NornirLogger(__name__, job_result, data.get("debug")) - global_settings = GoldenConfigSetting.objects.first() - verify_global_settings(logger, global_settings, ["backup_path_template"]) + + qs = get_job_filter(data) + device_to_settings_map = get_device_to_settings_map(queryset=qs) + + for settings in set(device_to_settings_map.values()): + verify_settings(logger, settings, ["backup_path_template"]) # Build a dictionary, with keys of platform.slug, and the regex line in it for the netutils func. remove_regex_dict = {} @@ -118,7 +122,7 @@ def config_backup(job_result, data): "options": { "credentials_class": NORNIR_SETTINGS.get("credentials"), "params": NORNIR_SETTINGS.get("inventory_params"), - "queryset": get_job_filter(data), + "queryset": qs, "defaults": {"now": now}, }, }, @@ -130,7 +134,7 @@ def config_backup(job_result, data): task=run_backup, name="BACKUP CONFIG", logger=logger, - global_settings=global_settings, + device_to_settings_map=device_to_settings_map, remove_regex_dict=remove_regex_dict, replace_regex_dict=replace_regex_dict, ) diff --git a/nautobot_golden_config/nornir_plays/config_compliance.py b/nautobot_golden_config/nornir_plays/config_compliance.py index 3a30ed80..33279164 100644 --- a/nautobot_golden_config/nornir_plays/config_compliance.py +++ b/nautobot_golden_config/nornir_plays/config_compliance.py @@ -19,9 +19,9 @@ from nautobot_golden_config.models import ComplianceRule, ConfigCompliance, GoldenConfigSetting, GoldenConfig from nautobot_golden_config.utilities.helper import ( + get_device_to_settings_map, get_job_filter, - get_repository_working_dir, - verify_global_settings, + verify_settings, render_jinja_template, ) from nautobot_golden_config.nornir_plays.processor import ProcessGoldenConfig @@ -56,7 +56,7 @@ def diff_files(backup_file, intended_file): def run_compliance( # pylint: disable=too-many-arguments,too-many-locals task: Task, logger, - global_settings, + device_to_settings_map, rules, ) -> Result: """Prepare data for compliance task. 
@@ -68,6 +68,7 @@ def run_compliance( # pylint: disable=too-many-arguments,too-many-locals result (Result): Result from Nornir task """ obj = task.host.data["obj"] + settings = device_to_settings_map[obj.id] compliance_obj = GoldenConfig.objects.filter(device=obj).first() if not compliance_obj: @@ -75,18 +76,18 @@ def run_compliance( # pylint: disable=too-many-arguments,too-many-locals compliance_obj.compliance_last_attempt_date = task.host.defaults.data["now"] compliance_obj.save() - intended_directory = get_repository_working_dir("intended", obj, logger, global_settings) - - intended_path_template_obj = render_jinja_template(obj, logger, global_settings.intended_path_template) + intended_directory = settings.intended_repository.filesystem_path + intended_path_template_obj = render_jinja_template(obj, logger, settings.intended_path_template) intended_file = os.path.join(intended_directory, intended_path_template_obj) + if not os.path.exists(intended_file): logger.log_failure(obj, f"Unable to locate intended file for device at {intended_file}") raise NornirNautobotException() - backup_directory = get_repository_working_dir("backup", obj, logger, global_settings) - - backup_template = render_jinja_template(obj, logger, global_settings.backup_path_template) + backup_directory = settings.backup_repository.filesystem_path + backup_template = render_jinja_template(obj, logger, settings.backup_path_template) backup_file = os.path.join(backup_directory, backup_template) + if not os.path.exists(backup_file): logger.log_failure(obj, f"Unable to locate backup file for device at {backup_file}") raise NornirNautobotException() @@ -130,8 +131,13 @@ def config_compliance(job_result, data): now = datetime.now() rules = get_rules() logger = NornirLogger(__name__, job_result, data.get("debug")) - global_settings = GoldenConfigSetting.objects.first() - verify_global_settings(logger, global_settings, ["backup_path_template", "intended_path_template"]) + + qs = get_job_filter(data) + device_to_settings_map = get_device_to_settings_map(queryset=qs) + + for settings in set(device_to_settings_map.values()): + verify_settings(logger, settings, ["backup_path_template", "intended_path_template"]) + try: with InitNornir( runner=NORNIR_SETTINGS.get("runner"), @@ -141,7 +147,7 @@ def config_compliance(job_result, data): "options": { "credentials_class": NORNIR_SETTINGS.get("credentials"), "params": NORNIR_SETTINGS.get("inventory_params"), - "queryset": get_job_filter(data), + "queryset": qs, "defaults": {"now": now}, }, }, @@ -153,7 +159,7 @@ def config_compliance(job_result, data): task=run_compliance, name="RENDER COMPLIANCE TASK GROUP", logger=logger, - global_settings=global_settings, + device_to_settings_map=device_to_settings_map, rules=rules, ) diff --git a/nautobot_golden_config/nornir_plays/config_intended.py b/nautobot_golden_config/nornir_plays/config_intended.py index 42078c39..2ee3663a 100644 --- a/nautobot_golden_config/nornir_plays/config_intended.py +++ b/nautobot_golden_config/nornir_plays/config_intended.py @@ -20,9 +20,9 @@ from nautobot_golden_config.models import GoldenConfigSetting, GoldenConfig from nautobot_golden_config.utilities.helper import ( + get_device_to_settings_map, get_job_filter, - get_repository_working_dir, - verify_global_settings, + verify_settings, render_jinja_template, ) from nautobot_golden_config.utilities.graphql import graph_ql_query @@ -36,7 +36,7 @@ def run_template( # pylint: disable=too-many-arguments - task: Task, logger, global_settings, nautobot_job, 
jinja_root_path + task: Task, logger, device_to_settings_map, nautobot_job ) -> Result: """Render Jinja Template. @@ -47,12 +47,12 @@ def run_template( # pylint: disable=too-many-arguments logger (NornirLogger): Logger to log messages to. global_settings (GoldenConfigSetting): The settings for GoldenConfigPlugin. nautobot_job (Result): The the output from the Nautobot Job instance being run. - jinja_root_path (str): The root path to the Jinja2 intended config file. Returns: result (Result): Result from Nornir task """ obj = task.host.data["obj"] + settings = device_to_settings_map[obj.id] intended_obj = GoldenConfig.objects.filter(device=obj).first() if not intended_obj: @@ -60,12 +60,12 @@ def run_template( # pylint: disable=too-many-arguments intended_obj.intended_last_attempt_date = task.host.defaults.data["now"] intended_obj.save() - intended_directory = get_repository_working_dir("intended", obj, logger, global_settings) - intended_path_template_obj = render_jinja_template(obj, logger, global_settings.intended_path_template) + intended_directory = settings.intended_repository.filesystem_path + intended_path_template_obj = render_jinja_template(obj, logger, settings.intended_path_template) output_file_location = os.path.join(intended_directory, intended_path_template_obj) - jinja_template = render_jinja_template(obj, logger, global_settings.jinja_path_template) - status, device_data = graph_ql_query(nautobot_job.request, obj, global_settings.sot_agg_query) + jinja_template = render_jinja_template(obj, logger, settings.jinja_path_template) + status, device_data = graph_ql_query(nautobot_job.request, obj, settings.sot_agg_query) if status != 200: logger.log_failure(obj, f"The GraphQL query return a status of {str(status)} with error of {str(device_data)}") raise NornirNautobotException() @@ -78,7 +78,7 @@ def run_template( # pylint: disable=too-many-arguments obj=obj, logger=logger, jinja_template=jinja_template, - jinja_root_path=jinja_root_path, + jinja_root_path=settings.jinja_repository.path, output_file_location=output_file_location, default_drivers_mapping=get_dispatcher(), jinja_filters=jinja_env.filters, @@ -92,22 +92,26 @@ def run_template( # pylint: disable=too-many-arguments return Result(host=task.host, result=generated_config) -def config_intended(nautobot_job, data, jinja_root_path): +def config_intended(nautobot_job, data): """ Nornir play to generate configurations. Args: nautobot_job (Result): The Nautobot Job instance being run. data (dict): Form data from Nautobot Job. - jinja_root_path (str): The root path to the Jinja2 intended config file. Returns: None: Intended configuration files are written to filesystem. 
""" now = datetime.now() logger = NornirLogger(__name__, nautobot_job, data.get("debug")) - global_settings = GoldenConfigSetting.objects.first() - verify_global_settings(logger, global_settings, ["jinja_path_template", "intended_path_template", "sot_agg_query"]) + + qs = get_job_filter(data) + device_to_settings_map = get_device_to_settings_map(queryset=qs) + + for settings in set(device_to_settings_map.values()): + verify_settings(logger, settings, ["jinja_path_template", "intended_path_template", "sot_agg_query"]) + try: with InitNornir( runner=NORNIR_SETTINGS.get("runner"), @@ -117,7 +121,7 @@ def config_intended(nautobot_job, data, jinja_root_path): "options": { "credentials_class": NORNIR_SETTINGS.get("credentials"), "params": NORNIR_SETTINGS.get("inventory_params"), - "queryset": get_job_filter(data), + "queryset": qs, "defaults": {"now": now}, }, }, @@ -130,9 +134,8 @@ def config_intended(nautobot_job, data, jinja_root_path): task=run_template, name="RENDER CONFIG", logger=logger, - global_settings=global_settings, + device_to_settings_map=device_to_settings_map, nautobot_job=nautobot_job, - jinja_root_path=jinja_root_path, ) except Exception as err: diff --git a/nautobot_golden_config/tables.py b/nautobot_golden_config/tables.py index e01ee415..e9faced2 100644 --- a/nautobot_golden_config/tables.py +++ b/nautobot_golden_config/tables.py @@ -357,3 +357,55 @@ class Meta(BaseTable.Meta): model = models.ConfigReplace fields = ("pk", "name", "platform", "description", "regex", "replace") default_columns = ("pk", "name", "platform", "description", "regex", "replace") + + +class GoldenConfigSettingTable(BaseTable): + # pylint: disable=R0903 + """Table for list view.""" + + pk = ToggleColumn() + name = Column(order_by=("_name",), linkify=True) + jinja_repository = Column( + verbose_name="Jinja Repository", + empty_values=(), + ) + intended_repository = Column( + verbose_name="Intended Repository", + empty_values=(), + ) + backup_repository = Column( + verbose_name="Backup Repository", + empty_values=(), + ) + + def _render_capability(self, record, column, record_attribute): # pylint: disable=unused-argument, no-self-use + if getattr(record, record_attribute, None): # pylint: disable=no-else-return + return "✔" + + return "✘" + + def render_backup_repository(self, record, column): # pylint: disable=no-self-use + """Render backup repository YES/NO value.""" + return self._render_capability(record=record, column=column, record_attribute="backup_repository") + + def render_intended_repository(self, record, column): # pylint: disable=no-self-use + """Render intended repository YES/NO value.""" + return self._render_capability(record=record, column=column, record_attribute="intended_repository") + + def render_jinja_repository(self, record, column): # pylint: disable=no-self-use + """Render jinja repository YES/NO value.""" + return self._render_capability(record=record, column=column, record_attribute="jinja_repository") + + class Meta(BaseTable.Meta): + """Meta attributes.""" + + model = models.GoldenConfigSetting + fields = ( + "pk", + "name", + "weight", + "description", + "backup_repository", + "intended_repository", + "jinja_repository", + ) diff --git a/nautobot_golden_config/templates/nautobot_golden_config/goldenconfigsetting.html b/nautobot_golden_config/templates/nautobot_golden_config/goldenconfigsetting.html index 549a0f6e..3a022bc3 100644 --- a/nautobot_golden_config/templates/nautobot_golden_config/goldenconfigsetting.html +++ 
b/nautobot_golden_config/templates/nautobot_golden_config/goldenconfigsetting.html @@ -1,139 +1,247 @@ {% extends 'base.html' %} {% load buttons %} -{% load static %} {% load custom_links %} {% load helpers %} - -{% block title %}{{ object }}{% endblock %} +{% load perms %} +{% load plugins %} +{% load static %} +{% load tz %} {% block header %} -
- {% if perms.nautobot_golden_config.change_goldenconfigsetting %} - - Edit - - {% endif %} -
-

Golden Configuration Settings

-{% include 'inc/created_updated.html' %} -
- {% custom_links object %} -
- -{% endblock %} +
+ {% with list_url=object|validated_viewname:"list" %} +
+ +
+
+
+
+ + + + +
+
+
+ {% endwith %} +
+
+ {% block buttons %} + {% if user|can_change:object %} + {% edit_button object %} + {% endif %} + {% if user|can_delete:object %} + {% delete_button object %} + {% endif %} + {% endblock buttons %} +
+

{{ object }}

+ {% include 'inc/created_updated.html' %} +
+
+ + +{% endblock header %} {% block content %} - -
-
-
-
- Settings +
+
+
+
+
+
+ General Settings +
+ + + + + + + + + + + + + +
Weight{{ object.weight }}
Description{{ object.description|placeholder }}
Scope of Devices +
{{ object.scope|render_json }}
+
+
+
+
+
+
+ Backup Configuration +
+ + + + + + + + + + + + + +
+ Backup Repository + + {% if object.backup_repository %} + {{ object.backup_repository }} + {% else %} + None + {% endif %} +
+ Backup Path in Jinja Template Form + + {{ object.backup_path_template|placeholder }} +
+ Backup Test + + {{ object.backup_test_connectivity|placeholder }} +
+
+
+
+ Intended Configuration +
+ + + + + + + + + +
+ Intended Repository + + {% if object.intended_repository %} + {{ object.intended_repository }} + {% else %} + None + {% endif %} +
+ Intended Path in Jinja Template Form + + {{ object.intended_path_template|placeholder }} +
+
+
+
+ Templates Configuration +
+ + + + + + + + + + + + + + +
+ Jinja Repository + + {% if object.jinja_repository %} + {{ object.jinja_repository }} + {% else %} + None + {% endif %} +
+ Template Path in Jinja Template Form + + {{ object.jinja_path_template|placeholder }} +
GraphQL Query +
{{ object.sot_agg_query|render_json }}
+
+
+
+
+ +
+
+ {% block content_full_width_page %}{% endblock content_full_width_page %} + {% plugin_full_width_page object %} +
+
+
+
+
+
+ {% include 'inc/object_details_advanced_panel.html' %} +
+
+ {% block advanced_content_right_page %}{% endblock advanced_content_right_page %} +
+
+
+
+ {% block advanced_content_full_width_page %}{% endblock advanced_content_full_width_page %} +
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Backup Repositories - {% for item in object.backup_repository.all %} - {% if object.backup_repository %} - {% include "nautobot_golden_config/manytomany.html" with url_name="extras:gitrepository" %} - {% endif %} - {% endfor %} -
Backup Repository Matching Rule -
{{ object.backup_match_rule }}
-
Backup Path Template -
{{ object.backup_path_template }}
-
Intended Repositories - {% for item in object.intended_repository.all %} - {% if object.intended_repository %} - {% include "nautobot_golden_config/manytomany.html" with url_name="extras:gitrepository" %} - {% endif %} - {% endfor %} -
Intended Repository Matching Rule -
{{ object.intended_match_rule }}
-
Intended Path Template -
{{ object.intended_path_template }}
-
Jinja Repository - {% if object.jinja_repository %} - {{ object.jinja_repository }} - {% endif %} -
Jinja Path Template -
{{ object.jinja_path_template }}
-
Backup Test Connectivity - {% if object.backup_test_connectivity %} - - - - {% else %} - - - - {% endif %} -
Scope of Devices -
{{ object.scope|render_json }}
-
SoT Aggregation Query -
{{ object.sot_agg_query }}
-
- {% include 'inc/custom_fields_panel.html' %} - {% include 'inc/relationships_panel.html' %} -
-
-{% endblock %} \ No newline at end of file +{% endblock content %} + +{% block javascript %} + + +{% endblock javascript %} diff --git a/nautobot_golden_config/templates/nautobot_golden_config/goldenconfigsetting_edit.html b/nautobot_golden_config/templates/nautobot_golden_config/goldenconfigsetting_edit.html new file mode 100644 index 00000000..5f785609 --- /dev/null +++ b/nautobot_golden_config/templates/nautobot_golden_config/goldenconfigsetting_edit.html @@ -0,0 +1,42 @@ +{% extends 'generic/object_edit.html' %} +{% load form_helpers %} + +{% block form %} +
+
GoldenConfigSetting
+
+ {% render_field form.name %} + {% render_field form.slug %} + {% render_field form.weight %} + {% render_field form.description %} + {% render_field form.scope %} +
+
+ +
+
Backup Configuration
+
+ {% render_field form.backup_repository %} + {% render_field form.backup_path_template %} + {% render_field form.backup_test_connectivity %} +
+
+ +
+
Intended Configuration
+
+ {% render_field form.intended_repository %} + {% render_field form.intended_path_template %} +
+
+ +
+
Templates Configuration
+
+ {% render_field form.jinja_repository %} + {% render_field form.jinja_path_template %} + {% render_field form.sot_agg_query %} +
+
+ +{% endblock %} diff --git a/nautobot_golden_config/tests/forms/test_golden_config_settings.py b/nautobot_golden_config/tests/forms/test_golden_config_settings.py index 9af047ce..c4f6c82a 100644 --- a/nautobot_golden_config/tests/forms/test_golden_config_settings.py +++ b/nautobot_golden_config/tests/forms/test_golden_config_settings.py @@ -21,17 +21,13 @@ def test_no_query_no_scope_success(self): """Testing GoldenConfigForm without specifying a unique scope or GraphQL Query.""" form = GoldenConfigSettingFeatureForm( data={ - "backup_repository": [ - GitRepository.objects.get(name="test-backup-repo-1"), - GitRepository.objects.get(name="test-backup-repo-2"), - ], - "backup_match_rule": "backup-{{ obj.site.region.parent.slug }}", + "name": "test", + "slug": "test", + "weight": 1000, + "description": "Test description.", + "backup_repository": GitRepository.objects.get(name="test-backup-repo-1"), "backup_path_template": "{{ obj.site.region.parent.slug }}/{{obj.name}}.cfg", - "intended_repository": [ - GitRepository.objects.get(name="test-intended-repo-1"), - GitRepository.objects.get(name="test-intended-repo-2"), - ], - "intended_match_rule": "intended-{{ obj.site.region.parent.slug }}", + "intended_repository": GitRepository.objects.get(name="test-intended-repo-1"), "intended_path_template": "{{ obj.site.slug }}/{{ obj.name }}.cfg", "backup_test_connectivity": True, } @@ -39,102 +35,6 @@ def test_no_query_no_scope_success(self): self.assertTrue(form.is_valid()) self.assertTrue(form.save()) - def test_clean_backup_template(self): - """Testing clean method for single backup repo with a matching pattern.""" - form = GoldenConfigSettingFeatureForm( - data={ - "backup_repository": [GitRepository.objects.get(name="test-backup-repo-2")], - "backup_match_rule": "backup-{{ obj.site.region.parent.slug }}", - "backup_path_template": "{{ obj.site.region.parent.slug }}/{{obj.name}}.cfg", - "intended_repository": [ - GitRepository.objects.get(name="test-intended-repo-1"), - GitRepository.objects.get(name="test-intended-repo-2"), - ], - "intended_match_rule": "intended-{{ obj.site.region.parent.slug }}", - "intended_path_template": "{{ obj.site.slug }}/{{ obj.name }}.cfg", - "backup_test_connectivity": True, - } - ) - self.assertFalse(form.is_valid()) - self.assertEqual( - form.non_field_errors(), - ["If you configure only one backup repository, do not enter the backup repository matching rule template."], - ) - - def test_clean_backup_template_missing_match_rule(self): - """Testing clean method for multiple backup repos without a matching pattern.""" - form = GoldenConfigSettingFeatureForm( - data={ - "backup_repository": [ - GitRepository.objects.get(name="test-backup-repo-1"), - GitRepository.objects.get(name="test-backup-repo-2"), - ], - "backup_match_rule": "", - "backup_path_template": "{{ obj.site.region.parent.slug }}/{{obj.name}}.cfg", - "intended_repository": [ - GitRepository.objects.get(name="test-intended-repo-1"), - GitRepository.objects.get(name="test-intended-repo-2"), - ], - "intended_match_rule": "intended-{{ obj.site.region.parent.slug }}", - "intended_path_template": "{{ obj.site.slug }}/{{ obj.name }}.cfg", - "backup_test_connectivity": True, - } - ) - self.assertFalse(form.is_valid()) - self.assertEqual( - form.non_field_errors(), - [ - "If you specify more than one backup repository, you must provide the backup repository matching rule template." 
- ], - ) - - def test_clean_intended_template(self): - """Testing clean method for single intended repo with a matching pattern.""" - form = GoldenConfigSettingFeatureForm( - data={ - "backup_repository": [GitRepository.objects.get(name="test-backup-repo-2")], - "backup_path_template": "{{ obj.site.region.parent.slug }}/{{obj.name}}.cfg", - "intended_repository": [GitRepository.objects.get(name="test-intended-repo-1")], - "intended_match_rule": "intended-{{ obj.site.region.parent.slug }}", - "intended_path_template": "{{ obj.site.slug }}/{{ obj.name }}.cfg", - "backup_test_connectivity": True, - } - ) - self.assertFalse(form.is_valid()) - self.assertEqual( - form.non_field_errors(), - [ - "If you configure only one intended repository, do not enter the intended repository matching rule template." - ], - ) - - def test_clean_intended_template_missing_match_rule(self): - """Testing clean method for multiple intended repos without a matching pattern.""" - form = GoldenConfigSettingFeatureForm( - data={ - "backup_repository": [ - GitRepository.objects.get(name="test-backup-repo-1"), - GitRepository.objects.get(name="test-backup-repo-2"), - ], - "backup_match_rule": "backup-{{ obj.site.region.parent.slug }}", - "backup_path_template": "{{ obj.site.region.parent.slug }}/{{obj.name}}.cfg", - "intended_repository": [ - GitRepository.objects.get(name="test-intended-repo-1"), - GitRepository.objects.get(name="test-intended-repo-2"), - ], - "intended_match_rule": "", - "intended_path_template": "{{ obj.site.slug }}/{{ obj.name }}.cfg", - "backup_test_connectivity": True, - } - ) - self.assertFalse(form.is_valid()) - self.assertEqual( - form.non_field_errors(), - [ - "If you specify more than one intended repository, you must provide the intended repository matching rule template." - ], - ) - def test_clean_up(self): """Transactional custom model, unable to use `get_or_create`. @@ -142,7 +42,3 @@ def test_clean_up(self): """ GitRepository.objects.all().delete() self.assertEqual(GitRepository.objects.all().count(), 0) - - # Put back a general GoldenConfigSetting object. - global_settings = GoldenConfigSetting.objects.create() - global_settings.save() diff --git a/nautobot_golden_config/tests/test_api.py b/nautobot_golden_config/tests/test_api.py index 6ef0df03..b42c2d7b 100644 --- a/nautobot_golden_config/tests/test_api.py +++ b/nautobot_golden_config/tests/test_api.py @@ -97,121 +97,27 @@ def setUp(self): self.add_permissions("nautobot_golden_config.change_goldenconfigsetting") self.base_view = reverse("plugins-api:nautobot_golden_config-api:goldenconfigsetting-list") self.data = { + "name": "test-setting-1", + "slug": "test_setting_1", + "description": "This is a description field of test-setting-1.", + "weight": 5000, "tags": [], "computed_fields": {}, "custom_fields": {}, "_custom_field_data": {}, - "backup_match_rule": "backup-{{obj.site.region.parent.slug}}", "backup_path_template": "{{obj.site.region.slug}}/{{obj.site.slug}}/{{obj.name}}.cfg", - "intended_match_rule": "intended-{{obj.site.region.parent.slug}}", "intended_path_template": "{{obj.site.region.slug}}/{{obj.site.slug}}/{{obj.name}}.cfg", "jinja_path_template": "templates/{{obj.platform.slug}}/{{obj.platform.slug}}_main.j2", "backup_test_connectivity": False, "scope": {"has_primary_ip": "True"}, "sot_agg_query": "query ($device_id: ID!) 
{\r\n device(id: $device_id) {\r\n config_context\r\n device_role {\r\n name\r\n slug\r\n }\r\n hostname: name\r\n platform {\r\n manufacturer {\r\n name\r\n }\r\n name\r\n napalm_driver\r\n slug\r\n }\r\n primary_ip4 {\r\n address\r\n interface {\r\n name\r\n }\r\n id\r\n }\r\n site {\r\n name\r\n region {\r\n name\r\n slug\r\n parent {\r\n name\r\n slug\r\n }\r\n }\r\n slug\r\n }\r\n }\r\n}", "jinja_repository": str(GitRepository.objects.get(name="test-jinja-repo-1").id), - "backup_repository": [ - str(GitRepository.objects.get(name="test-backup-repo-1").id), - str(GitRepository.objects.get(name="test-backup-repo-2").id), - ], - "intended_repository": [ - str(GitRepository.objects.get(name="test-intended-repo-1").id), - str(GitRepository.objects.get(name="test-intended-repo-2").id), - ], + "backup_repository": str(GitRepository.objects.get(name="test-backup-repo-1").id), + "intended_repository": str(GitRepository.objects.get(name="test-intended-repo-1").id), } - # Since we enforce a singleton pattern on this model, nuke any auto-created objects. + # Since we enforced a singleton pattern on this model in 0.9 release migrations, nuke any auto-created objects. GoldenConfigSetting.objects.all().delete() - def test_golden_config_settings_create_1backup_with_match_rule(self): - """Verify that an invalid POST with an unnecessary match_rule returns an error.""" - bad_data = deepcopy(self.data) - bad_data["backup_repository"] = [str(GitRepository.objects.get(name="test-backup-repo-1").id)] - response = self.client.post( - self.base_view, - data=bad_data, - format="json", - **self.header, - ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.data["non_field_errors"][0], - "If you configure only one backup repository, do not enter the backup repository matching rule template.", - ) - self.assertEqual(GoldenConfigSetting.objects.all().count(), 0) - - def test_golden_config_settings_create_backup_match_rule_missing(self): - """Verify that an invalid POST with a missing backup_match_rule returns an error.""" - bad_data = deepcopy(self.data) - bad_data["backup_match_rule"] = "" - response = self.client.post( - self.base_view, - data=bad_data, - format="json", - **self.header, - ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.data["non_field_errors"][0], - "If you specify more than one backup repository, you must provide the backup repository matching rule template.", - ) - self.assertEqual(GoldenConfigSetting.objects.all().count(), 0) - - def test_golden_config_settings_create_1intended_with_match_rule(self): - """Verify that an invalid POST with an unnecessary match_rule returns an error.""" - bad_data = deepcopy(self.data) - bad_data["intended_repository"] = [str(GitRepository.objects.get(name="test-intended-repo-2").id)] - response = self.client.post( - self.base_view, - data=bad_data, - format="json", - **self.header, - ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.data["non_field_errors"][0], - "If you configure only one intended repository, do not enter the intended repository matching rule template.", - ) - self.assertEqual(GoldenConfigSetting.objects.all().count(), 0) - - def test_golden_config_settings_create_intended_match_rule_missing(self): - """Verify that an invalid POST with a missing intended_match_rule returns an error.""" - bad_data = deepcopy(self.data) - bad_data["intended_match_rule"] = "" - response = self.client.post( - 
self.base_view, - data=bad_data, - format="json", - **self.header, - ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.data["non_field_errors"][0], - "If you specify more than one intended repository, you must provide the intended repository matching rule template.", - ) - self.assertEqual(GoldenConfigSetting.objects.all().count(), 0) - - def test_golden_config_settings_create_multiple_problems(self): - """Verify that an invalid POST with multiple problems return multiple, correct errors.""" - bad_data = deepcopy(self.data) - bad_data["backup_repository"] = [str(GitRepository.objects.get(name="test-backup-repo-1").id)] - bad_data["intended_match_rule"] = "" - response = self.client.post( - self.base_view, - data=bad_data, - format="json", - **self.header, - ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.data["non_field_errors"][0], - "If you configure only one backup repository, do not enter the backup repository matching rule template.", - ) - self.assertEqual( - response.data["non_field_errors"][1], - "If you specify more than one intended repository, you must provide the intended repository matching rule template.", - ) - self.assertEqual(GoldenConfigSetting.objects.all().count(), 0) - def test_golden_config_settings_create_good(self): """Test a POST with good values.""" response = self.client.post( @@ -223,11 +129,9 @@ def test_golden_config_settings_create_good(self): self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertTrue(response.data["created"]) self.assertTrue(response.data["id"]) - self.assertEqual(response.data["backup_match_rule"], "backup-{{obj.site.region.parent.slug}}") self.assertEqual( response.data["backup_path_template"], "{{obj.site.region.slug}}/{{obj.site.slug}}/{{obj.name}}.cfg" ) - self.assertEqual(response.data["intended_match_rule"], "intended-{{obj.site.region.parent.slug}}") self.assertEqual( response.data["intended_path_template"], "{{obj.site.region.slug}}/{{obj.site.slug}}/{{obj.name}}.cfg" ) @@ -241,19 +145,9 @@ def test_golden_config_settings_create_good(self): "query ($device_id: ID!) 
{\r\n device(id: $device_id) {\r\n config_context\r\n device_role {\r\n name\r\n slug\r\n }\r\n hostname: name\r\n platform {\r\n manufacturer {\r\n name\r\n }\r\n name\r\n napalm_driver\r\n slug\r\n }\r\n primary_ip4 {\r\n address\r\n interface {\r\n name\r\n }\r\n id\r\n }\r\n site {\r\n name\r\n region {\r\n name\r\n slug\r\n parent {\r\n name\r\n slug\r\n }\r\n }\r\n slug\r\n }\r\n }\r\n}", ) self.assertEqual(response.data["jinja_repository"], GitRepository.objects.get(name="test-jinja-repo-1").id) + self.assertEqual(response.data["backup_repository"], GitRepository.objects.get(name="test-backup-repo-1").id) self.assertEqual( - response.data["backup_repository"], - [ - GitRepository.objects.get(name="test-backup-repo-1").id, - GitRepository.objects.get(name="test-backup-repo-2").id, - ], - ) - self.assertEqual( - response.data["intended_repository"], - [ - GitRepository.objects.get(name="test-intended-repo-1").id, - GitRepository.objects.get(name="test-intended-repo-2").id, - ], + response.data["intended_repository"], GitRepository.objects.get(name="test-intended-repo-1").id ) # Clean up GoldenConfigSetting.objects.all().delete() @@ -268,8 +162,7 @@ def test_golden_config_settings_update_good(self): **self.header, ) new_data = deepcopy(self.data) - new_data["backup_repository"] = [str(GitRepository.objects.get(name="test-backup-repo-1").id)] - new_data["backup_match_rule"] = "" + new_data["backup_repository"] = str(GitRepository.objects.get(name="test-backup-repo-1").id) response = self.client.put( f"{self.base_view}{response_post.data['id']}/", data=new_data, @@ -277,11 +170,9 @@ def test_golden_config_settings_update_good(self): **self.header, ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.data["backup_match_rule"], "") self.assertEqual( response.data["backup_path_template"], "{{obj.site.region.slug}}/{{obj.site.slug}}/{{obj.name}}.cfg" ) - self.assertEqual(response.data["intended_match_rule"], "intended-{{obj.site.region.parent.slug}}") self.assertEqual( response.data["intended_path_template"], "{{obj.site.region.slug}}/{{obj.site.slug}}/{{obj.name}}.cfg" ) @@ -295,118 +186,9 @@ def test_golden_config_settings_update_good(self): "query ($device_id: ID!) 
{\r\n device(id: $device_id) {\r\n config_context\r\n device_role {\r\n name\r\n slug\r\n }\r\n hostname: name\r\n platform {\r\n manufacturer {\r\n name\r\n }\r\n name\r\n napalm_driver\r\n slug\r\n }\r\n primary_ip4 {\r\n address\r\n interface {\r\n name\r\n }\r\n id\r\n }\r\n site {\r\n name\r\n region {\r\n name\r\n slug\r\n parent {\r\n name\r\n slug\r\n }\r\n }\r\n slug\r\n }\r\n }\r\n}", ) self.assertEqual(response.data["jinja_repository"], GitRepository.objects.get(name="test-jinja-repo-1").id) + self.assertEqual(response.data["backup_repository"], GitRepository.objects.get(name="test-backup-repo-1").id) self.assertEqual( - response.data["backup_repository"], - [ - GitRepository.objects.get(name="test-backup-repo-1").id, - ], - ) - self.assertEqual( - response.data["intended_repository"], - [ - GitRepository.objects.get(name="test-intended-repo-1").id, - GitRepository.objects.get(name="test-intended-repo-2").id, - ], - ) - # Clean up - GoldenConfigSetting.objects.all().delete() - self.assertEqual(GoldenConfigSetting.objects.all().count(), 0) - - def test_golden_config_settings_update_1backup_with_match_rule(self): - """Verify a PUT to the valid settings object, with an invalid backup repo set, returns a 400.""" - response_post = self.client.post( - self.base_view, - data=self.data, - format="json", - **self.header, - ) - bad_data = deepcopy(self.data) - bad_data["backup_repository"] = [str(GitRepository.objects.get(name="test-backup-repo-1").id)] - response = self.client.put( - f"{self.base_view}{response_post.data['id']}/", - data=bad_data, - format="json", - **self.header, - ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.data["non_field_errors"][0], - "If you configure only one backup repository, do not enter the backup repository matching rule template.", - ) - # Clean up - GoldenConfigSetting.objects.all().delete() - self.assertEqual(GoldenConfigSetting.objects.all().count(), 0) - - def test_golden_config_settings_update_backup_match_rule_missing(self): - """Verify a PUT to the valid settings object, with an invalid backup repo set, returns a 400.""" - response_post = self.client.post( - self.base_view, - data=self.data, - format="json", - **self.header, - ) - bad_data = deepcopy(self.data) - bad_data["backup_match_rule"] = "" - response = self.client.put( - f"{self.base_view}{response_post.data['id']}/", - data=bad_data, - format="json", - **self.header, - ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.data["non_field_errors"][0], - "If you specify more than one backup repository, you must provide the backup repository matching rule template.", - ) - # Clean up - GoldenConfigSetting.objects.all().delete() - self.assertEqual(GoldenConfigSetting.objects.all().count(), 0) - - def test_golden_config_settings_update_1intended_with_match_rule(self): - """Verify a PUT to the valid settings object, with an invalid intended repo set, returns a 400.""" - response_post = self.client.post( - self.base_view, - data=self.data, - format="json", - **self.header, - ) - bad_data = deepcopy(self.data) - bad_data["intended_repository"] = [str(GitRepository.objects.get(name="test-intended-repo-1").id)] - response = self.client.put( - f"{self.base_view}{response_post.data['id']}/", - data=bad_data, - format="json", - **self.header, - ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.data["non_field_errors"][0], - "If you configure only 
one intended repository, do not enter the intended repository matching rule template.", - ) - # Clean up - GoldenConfigSetting.objects.all().delete() - self.assertEqual(GoldenConfigSetting.objects.all().count(), 0) - - def test_golden_config_settings_update_intended_match_rule_missing(self): - """Verify a PUT to the valid settings object, with an invalid intended repo set, returns a 400.""" - response_post = self.client.post( - self.base_view, - data=self.data, - format="json", - **self.header, - ) - bad_data = deepcopy(self.data) - bad_data["intended_match_rule"] = "" - response = self.client.put( - f"{self.base_view}{response_post.data['id']}/", - data=bad_data, - format="json", - **self.header, - ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.data["non_field_errors"][0], - "If you specify more than one intended repository, you must provide the intended repository matching rule template.", + response.data["intended_repository"], GitRepository.objects.get(name="test-intended-repo-1").id ) # Clean up GoldenConfigSetting.objects.all().delete() diff --git a/nautobot_golden_config/tests/test_graphql.py b/nautobot_golden_config/tests/test_graphql.py index 6e82faf2..4ff4746f 100644 --- a/nautobot_golden_config/tests/test_graphql.py +++ b/nautobot_golden_config/tests/test_graphql.py @@ -118,26 +118,30 @@ def setUp(self): git_obj = GitRepository.objects.create(**item) git_obj.save() - backup_repo_list = GitRepository.objects.filter( - provided_contents__contains="nautobot_golden_config.backupconfigs" - ) - intended_repo_list = GitRepository.objects.filter( - provided_contents__contains="nautobot_golden_config.intendedconfigs" - ) - - GoldenConfigSetting.objects.update( + # Since we enforce a singleton pattern on this model, nuke the auto-created object. 
+ GoldenConfigSetting.objects.all().delete() + + GoldenConfigSetting.objects.create( + name="test_name", + slug="test_slug", + weight=1000, + description="Test Description.", backup_path_template="test/backup", intended_path_template="test/intended", - jinja_repository=GitRepository.objects.get( - provided_contents__contains="nautobot_golden_config.jinjatemplate" - ), jinja_path_template="{{jinja_path}}", backup_test_connectivity=True, scope={"platform": ["platform1"]}, sot_agg_query="{test_model}", + backup_repository=GitRepository.objects.get( + provided_contents__contains="nautobot_golden_config.backupconfigs" + ), + intended_repository=GitRepository.objects.get( + provided_contents__contains="nautobot_golden_config.intendedconfigs" + ), + jinja_repository=GitRepository.objects.get( + provided_contents__contains="nautobot_golden_config.jinjatemplate" + ), ) - GoldenConfigSetting.objects.first().backup_repository.set(backup_repo_list) - GoldenConfigSetting.objects.first().intended_repository.set(intended_repo_list) self.feature1 = ComplianceFeature.objects.create( name="aaa", @@ -295,6 +299,9 @@ def test_query_golden_config_setting(self): query = """ query { golden_config_settings { + name + slug + weight backup_path_template intended_path_template jinja_path_template @@ -306,6 +313,9 @@ def test_query_golden_config_setting(self): response_data = { "golden_config_settings": [ { + "name": "test_name", + "slug": "test_slug", + "weight": 1000, "backup_path_template": "test/backup", "intended_path_template": "test/intended", "jinja_path_template": "{{jinja_path}}", diff --git a/nautobot_golden_config/tests/test_models.py b/nautobot_golden_config/tests/test_models.py index 86b60203..d93cfae9 100644 --- a/nautobot_golden_config/tests/test_models.py +++ b/nautobot_golden_config/tests/test_models.py @@ -88,12 +88,29 @@ class GoldenConfigSettingModelTestCase(TestCase): def setUp(self): """Get the golden config settings with the only allowed id.""" - self.global_settings = GoldenConfigSetting.objects.first() + create_git_repos() + + # Since we enforce a singleton pattern on this model, nuke the auto-created object. + GoldenConfigSetting.objects.all().delete() + + self.global_settings = GoldenConfigSetting.objects.create( # pylint: disable=attribute-defined-outside-init + name="test", + slug="test", + weight=1000, + description="Test Description.", + backup_path_template="{{ obj.site.region.parent.slug }}/{{obj.name}}.cfg", + intended_path_template="{{ obj.site.slug }}/{{ obj.name }}.cfg", + backup_test_connectivity=True, + jinja_repository=GitRepository.objects.get(name="test-jinja-repo-1"), + jinja_path_template="{{ obj.platform.slug }}/main.j2", + backup_repository=GitRepository.objects.get(name="test-backup-repo-1"), + intended_repository=GitRepository.objects.get(name="test-intended-repo-1"), + ) def test_absolute_url_success(self): """Verify that get_absolute_url() returns the expected URL.""" url_string = self.global_settings.get_absolute_url() - self.assertEqual(url_string, "/plugins/golden-config/setting/") + self.assertEqual(url_string, f"/plugins/golden-config/setting/{self.global_settings.slug}/") def test_bad_graphql_query(self): """Invalid graphql query.""" @@ -135,71 +152,51 @@ class GoldenConfigSettingGitModelTestCase(TestCase): def setUp(self) -> None: """Setup test data.""" create_git_repos() - # Since we enforce a singleton pattern on this model, nuke the auto-created object. 
+ + # Since we enforced a singleton pattern on this model in 0.9 release migrations, nuke any auto-created objects. GoldenConfigSetting.objects.all().delete() # Create fresh new object, populate accordingly. - self.golden_config = GoldenConfigSetting.objects.create( # pylint: disable=attribute-defined-outside-init - backup_match_rule="backup-{{ obj.site.region.parent.slug }}", + name="test", + slug="test", + weight=1000, + description="Test Description.", backup_path_template="{{ obj.site.region.parent.slug }}/{{obj.name}}.cfg", - intended_match_rule="intended-{{ obj.site.region.parent.slug }}", intended_path_template="{{ obj.site.slug }}/{{ obj.name }}.cfg", backup_test_connectivity=True, jinja_repository=GitRepository.objects.get(name="test-jinja-repo-1"), jinja_path_template="{{ obj.platform.slug }}/main.j2", + backup_repository=GitRepository.objects.get(name="test-backup-repo-1"), + intended_repository=GitRepository.objects.get(name="test-intended-repo-1"), ) - self.golden_config.backup_repository.set( - [ - GitRepository.objects.get(name="test-backup-repo-1"), - GitRepository.objects.get(name="test-backup-repo-2"), - ] - ) - self.golden_config.intended_repository.set( - [ - GitRepository.objects.get(name="test-intended-repo-1"), - GitRepository.objects.get(name="test-intended-repo-2"), - ] - ) - self.golden_config.save() def test_model_success(self): """Create a new instance of the GoldenConfigSettings model.""" - - self.assertEqual(self.golden_config.backup_match_rule, "backup-{{ obj.site.region.parent.slug }}") + self.assertEqual(self.golden_config.name, "test") + self.assertEqual(self.golden_config.slug, "test") + self.assertEqual(self.golden_config.weight, 1000) + self.assertEqual(self.golden_config.description, "Test Description.") self.assertEqual(self.golden_config.backup_path_template, "{{ obj.site.region.parent.slug }}/{{obj.name}}.cfg") - self.assertEqual(self.golden_config.intended_match_rule, "intended-{{ obj.site.region.parent.slug }}") self.assertEqual(self.golden_config.intended_path_template, "{{ obj.site.slug }}/{{ obj.name }}.cfg") self.assertTrue(self.golden_config.backup_test_connectivity) self.assertEqual(self.golden_config.jinja_repository, GitRepository.objects.get(name="test-jinja-repo-1")) self.assertEqual(self.golden_config.jinja_path_template, "{{ obj.platform.slug }}/main.j2") - self.assertEqual( - self.golden_config.backup_repository.first(), GitRepository.objects.get(name="test-backup-repo-1") - ) - self.assertEqual( - self.golden_config.backup_repository.last(), GitRepository.objects.get(name="test-backup-repo-2") - ) - self.assertEqual( - self.golden_config.intended_repository.first(), GitRepository.objects.get(name="test-intended-repo-1") - ) - self.assertEqual( - self.golden_config.intended_repository.last(), GitRepository.objects.get(name="test-intended-repo-2") - ) + self.assertEqual(self.golden_config.backup_repository, GitRepository.objects.get(name="test-backup-repo-1")) + self.assertEqual(self.golden_config.intended_repository, GitRepository.objects.get(name="test-intended-repo-1")) def test_removing_git_repos(self): """Ensure we can remove the Git Repository obejcts from GoldenConfigSetting.""" GitRepository.objects.all().delete() - self.assertEqual(self.golden_config.intended_repository.count(), 0) - self.assertEqual(self.golden_config.backup_repository.count(), 0) + gc = GoldenConfigSetting.objects.all().first() # pylint: disable=invalid-name + self.assertEqual(gc.intended_repository, None) + self.assertEqual(gc.backup_repository, None) 
self.assertEqual(GoldenConfigSetting.objects.all().count(), 1) def test_clean_up(self): - """Delete all objects created of GitRepository type.""" + """Delete all objects created of GoldenConfigSetting type.""" GoldenConfigSetting.objects.all().delete() - # Put back a general GoldenConfigSetting object. - global_settings = GoldenConfigSetting.objects.create() - global_settings.save() - self.assertEqual(GoldenConfigSetting.objects.all().count(), 1) + self.assertEqual(GoldenConfigSetting.objects.all().count(), 0) class ConfigRemoveModelTestCase(TestCase): diff --git a/nautobot_golden_config/tests/test_utilities/test_helpers.py b/nautobot_golden_config/tests/test_utilities/test_helpers.py index 8a01dd18..5713e3b9 100644 --- a/nautobot_golden_config/tests/test_utilities/test_helpers.py +++ b/nautobot_golden_config/tests/test_utilities/test_helpers.py @@ -16,14 +16,14 @@ null_to_empty, render_jinja_template, get_job_filter, - get_repository_working_dir, + get_device_to_settings_map, ) # pylint: disable=no-self-use -class HelpersTest(TestCase): +class HelpersTest(TestCase): # pylint: disable=too-many-instance-attributes """Test Helper Functions.""" def setUp(self): @@ -34,18 +34,59 @@ def setUp(self): create_helper_repo(name="backup-parent_region-1", provides="backupconfigs") create_helper_repo(name="intended-parent_region-1", provides="intendedconfigs") create_helper_repo(name="test-jinja-repo", provides="jinjatemplate") - self.global_settings = GoldenConfigSetting.objects.first() - self.global_settings.backup_repository.set([GitRepository.objects.get(name="backup-parent_region-1")]) - self.global_settings.intended_repository.set([GitRepository.objects.get(name="intended-parent_region-1")]) - self.global_settings.jinja_repository = GitRepository.objects.get(name="test-jinja-repo") - self.global_settings.backup_match_rule = "backup-{{ obj.site.region.parent.slug }}" - self.global_settings.intended_match_rule = "intended-{{ obj.site.region.parent.slug }}" + + create_helper_repo(name="backup-parent_region-2", provides="backupconfigs") + create_helper_repo(name="intended-parent_region-2", provides="intendedconfigs") + create_helper_repo(name="test-jinja-repo-2", provides="jinjatemplate") + + create_helper_repo(name="backup-parent_region-3", provides="backupconfigs") + create_helper_repo(name="intended-parent_region-3", provides="intendedconfigs") + create_helper_repo(name="test-jinja-repo-3", provides="jinjatemplate") + + # Since we enforce a singleton pattern on this model, nuke the auto-created object. 
+ GoldenConfigSetting.objects.all().delete() + + self.test_settings_a = GoldenConfigSetting.objects.create( + name="test_a", + slug="test_a", + description="test_a", + weight=1000, + backup_repository=GitRepository.objects.get(name="backup-parent_region-1"), + intended_repository=GitRepository.objects.get(name="intended-parent_region-1"), + jinja_repository=GitRepository.objects.get(name="test-jinja-repo"), + # Limit scope to orphaned device only + scope={"site": ["site-4"]}, + ) + + self.test_settings_b = GoldenConfigSetting.objects.create( + name="test_b", + slug="test_b", + description="test_b", + weight=2000, + backup_repository=GitRepository.objects.get(name="backup-parent_region-2"), + intended_repository=GitRepository.objects.get(name="intended-parent_region-2"), + jinja_repository=GitRepository.objects.get(name="test-jinja-repo-2"), + # Limit scope to orphaned device only + scope={"site": ["site-4"]}, + ) + + self.test_settings_c = GoldenConfigSetting.objects.create( + name="test_c", + slug="test_c", + description="test_c", + weight=1000, + backup_repository=GitRepository.objects.get(name="backup-parent_region-3"), + intended_repository=GitRepository.objects.get(name="intended-parent_region-3"), + jinja_repository=GitRepository.objects.get(name="test-jinja-repo-3"), + ) + # Device.objects.all().delete() create_device(name="test_device") create_orphan_device(name="orphan_device") self.job_result = MagicMock() self.data = MagicMock() self.logger = NornirLogger(__name__, self.job_result, self.data) + self.device_to_settings_map = get_device_to_settings_map(queryset=Device.objects.all()) def test_null_to_empty_null(self): """Ensure None returns with empty string.""" @@ -106,49 +147,25 @@ def test_render_jinja_template_exceptions_templateerror(self, template_mock, moc render_jinja_template(mock_device, mock_nornir_logger, "template") mock_nornir_logger.log_failure.assert_called_once() - def test_get_backup_repository_working_dir_success(self): + def test_get_backup_repository_dir_success(self): """Verify that we successfully look up the path from a provided repo object.""" - repo_type = "backup" - result = get_repository_working_dir( - repo_type, Device.objects.get(name="test_device"), self.logger, self.global_settings - ) - self.assertEqual(result, "/opt/nautobot/git/backup-parent_region-1") + device = Device.objects.get(name="test_device") + backup_directory = self.device_to_settings_map[device.id].backup_repository.filesystem_path + self.assertEqual(backup_directory, "/opt/nautobot/git/backup-parent_region-3") + + device = Device.objects.get(name="orphan_device") + backup_directory = self.device_to_settings_map[device.id].backup_repository.filesystem_path + self.assertEqual(backup_directory, "/opt/nautobot/git/backup-parent_region-2") - def test_get_intended_repository_working_dir_success(self): + def test_get_intended_repository_dir_success(self): """Verify that we successfully look up the path from a provided repo object.""" - repo_type = "intended" - result = get_repository_working_dir( - repo_type, Device.objects.get(name="test_device"), self.logger, self.global_settings - ) - self.assertEqual(result, "/opt/nautobot/git/intended-parent_region-1") - - def test_get_backup_repository_working_dir_no_match(self): - """Verify that we return the correct error when there is no matching backup repo.""" - repo_type = "backup" - logger = MagicMock() - result = get_repository_working_dir( - repo_type, Device.objects.get(name="orphan_device"), logger, self.global_settings - ) - 
self.assertEqual(result, None) - self.assertEqual(logger.log_failure.call_count, 1) - self.assertEqual( - logger.log_failure.call_args[0][1], - "There is no repository slug matching 'backup-parent_region-4' for device. Verify the matching rule and configured Git repositories.", - ) + device = Device.objects.get(name="test_device") + intended_directory = self.device_to_settings_map[device.id].intended_repository.filesystem_path + self.assertEqual(intended_directory, "/opt/nautobot/git/intended-parent_region-3") - def test_get_intended_repository_working_dir_no_match(self): - """Verify that we return the correct error when there is no matching intended repo.""" - repo_type = "intended" - logger = MagicMock() - result = get_repository_working_dir( - repo_type, Device.objects.get(name="orphan_device"), logger, self.global_settings - ) - self.assertEqual(result, None) - self.assertEqual(logger.log_failure.call_count, 1) - self.assertEqual( - logger.log_failure.call_args[0][1], - "There is no repository slug matching 'intended-parent_region-4' for device. Verify the matching rule and configured Git repositories.", - ) + device = Device.objects.get(name="orphan_device") + intended_directory = self.device_to_settings_map[device.id].intended_repository.filesystem_path + self.assertEqual(intended_directory, "/opt/nautobot/git/intended-parent_region-2") def test_get_job_filter_no_data_success(self): """Verify we get two devices returned when providing no data.""" @@ -173,9 +190,9 @@ def test_get_job_filter_device_filter_success(self): def test_get_job_filter_base_queryset_raise(self): """Verify we get raise for having a base_qs with no objects due to bad Golden Config Setting scope.""" Platform.objects.create(name="Placeholder Platform", slug="placeholder-platform") - golden_settings = GoldenConfigSetting.objects.first() - golden_settings.scope = {"platform": ["placeholder-platform"]} - golden_settings.validated_save() + for golden_settings in GoldenConfigSetting.objects.all(): + golden_settings.scope = {"platform": ["placeholder-platform"]} + golden_settings.validated_save() with self.assertRaises(NornirNautobotException) as failure: get_job_filter() self.assertEqual( @@ -205,3 +222,11 @@ def test_get_job_filter_device_no_platform_raise(self): failure.exception.args[0], "The following device(s) test_device have no platform defined. 
Platform is required.", ) + + def test_device_to_settings_map(self): + """Verify Golden Config Settings are properly mapped to devices.""" + test_device = Device.objects.get(name="test_device") + orphan_device = Device.objects.get(name="orphan_device") + self.assertEqual(self.device_to_settings_map[test_device.id], self.test_settings_c) + self.assertEqual(self.device_to_settings_map[orphan_device.id], self.test_settings_b) + self.assertEqual(get_device_to_settings_map(queryset=Device.objects.none()), {}) diff --git a/nautobot_golden_config/urls.py b/nautobot_golden_config/urls.py index 52d49fc6..86c61814 100644 --- a/nautobot_golden_config/urls.py +++ b/nautobot_golden_config/urls.py @@ -88,14 +88,20 @@ name="compliancefeature_changelog", kwargs={"model": models.ComplianceFeature}, ), - path("setting/", views.GoldenConfigSettingView.as_view(), name="goldenconfigsetting"), - path("setting/edit/", views.GoldenConfigSettingEditView.as_view(), name="goldenconfigsetting_edit"), + path("setting/", views.GoldenConfigSettingListView.as_view(), name="goldenconfigsetting_list"), + path("setting/add/", views.GoldenConfigSettingCreateView.as_view(), name="goldenconfigsetting_add"), + path("setting/delete/", views.GoldenConfigSettingBulkDeleteView.as_view(), name="goldenconfigsetting_bulk_delete"), path( - "setting/changelog/", + "setting//delete/", views.GoldenConfigSettingDeleteView.as_view(), name="goldenconfigsetting_delete" + ), + path("setting//edit/", views.GoldenConfigSettingEditView.as_view(), name="goldenconfigsetting_edit"), + path( + "setting//changelog/", ObjectChangeLogView.as_view(), name="goldenconfigsetting_changelog", kwargs={"model": models.GoldenConfigSetting}, ), + path("setting//", views.GoldenConfigSettingView.as_view(), name="goldenconfigsetting"), path("config-remove/", views.ConfigRemoveListView.as_view(), name="configremove_list"), path("config-remove/add/", views.ConfigRemoveEditView.as_view(), name="configremove_add"), path( diff --git a/nautobot_golden_config/utilities/helper.py b/nautobot_golden_config/utilities/helper.py index 3bc27cfe..5e4bf174 100644 --- a/nautobot_golden_config/utilities/helper.py +++ b/nautobot_golden_config/utilities/helper.py @@ -3,15 +3,11 @@ from jinja2 import exceptions as jinja_errors -from django import forms -from django.conf import settings - from nautobot.dcim.models import Device from nautobot.dcim.filters import DeviceFilterSet from nautobot.utilities.utils import render_jinja2 from nornir_nautobot.exceptions import NornirNautobotException -from nornir_nautobot.utils.logger import NornirLogger from nautobot_golden_config import models @@ -43,7 +39,10 @@ def get_job_filter(data=None): elif data.get("device"): query.update({"id": data["device"].values_list("pk", flat=True)}) - base_qs = models.GoldenConfigSetting.objects.first().get_queryset() + base_qs = Device.objects.none() + for obj in models.GoldenConfigSetting.objects.all(): + base_qs = base_qs | obj.get_queryset().distinct() + if base_qs.count() == 0: raise NornirNautobotException( "The base queryset didn't find any devices. Please check the Golden Config Setting scope." 
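
A rough illustration of how the two helpers changed in this file are meant to be used together — a sketch only, assuming a running Nautobot environment with this plugin installed; the loop and the `print` call are illustrative and not part of the patch:

```python
# Illustrative sketch only - mirrors the helpers introduced in this patch, not a drop-in module.
from nautobot.dcim.models import Device

from nautobot_golden_config.utilities.helper import get_device_to_settings_map

# Map every device in the queryset to the single GoldenConfigSetting whose scope matches it.
device_to_settings_map = get_device_to_settings_map(queryset=Device.objects.all())

for device in Device.objects.all():
    settings = device_to_settings_map.get(device.id)
    if settings is None:
        # Device falls outside every configured GoldenConfigSetting scope.
        continue
    # Each device now resolves to its own repositories instead of one global repository.
    if settings.backup_repository:
        print(device.name, settings.name, settings.backup_repository.filesystem_path)
```

Because `get_device_to_settings_map` walks `GoldenConfigSetting.objects.all()` and keeps only the first match per device, a device covered by several scopes resolves to a single settings object — the highest-weight one (ties broken by name), assuming the model's default ordering is by descending weight as the plugin docs in this patch describe, which is also what the updated helper unit tests assert.
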
@@ -69,7 +68,7 @@ def null_to_empty(val): return val -def verify_global_settings(logger, global_settings, attrs): +def verify_settings(logger, global_settings, attrs): """Helper function to verify required attributes are set before a Nornir play start.""" for item in attrs: if not getattr(global_settings, item): @@ -118,58 +117,13 @@ def render_jinja_template(obj, logger, template): raise NornirNautobotException from error -def clean_config_settings(repo_type: str, repo_count: int, match_rule: str): - """Custom clean for `GoldenConfigSettingFeatureForm`. - - Args: - repo_type (str): `intended` or `backup`. - repo_count (int): Total number of repos. - match_rule (str): Template str provided by user to match repos. - - Raises: - ValidationError: Custom Validation on form. - """ - if repo_count > 1: - if not match_rule: - raise forms.ValidationError( - f"If you specify more than one {repo_type} repository, you must provide the {repo_type} repository matching rule template." - ) - elif repo_count == 1 and match_rule: - raise forms.ValidationError( - f"If you configure only one {repo_type} repository, do not enter the {repo_type} repository matching rule template." - ) +def get_device_to_settings_map(queryset): + """Helper function to map settings to devices.""" + device_to_settings_map = {} + queryset_ids = queryset.values_list("id", flat=True) + for golden_config_setting in models.GoldenConfigSetting.objects.all(): + for device_id in golden_config_setting.get_queryset().values_list("id", flat=True): + if (device_id in queryset_ids) and (device_id not in device_to_settings_map): + device_to_settings_map[device_id] = golden_config_setting - -def get_repository_working_dir( - repo_type: str, - obj: Device, - logger: NornirLogger, - global_settings: models.GoldenConfigSetting, -) -> str: - """Match the Device to a repository working directory, based on the repository matching rule. - - Assume that the working directory == the slug of the repo. - - Args: - repo_type (str): Either `intended` or `backup` repository - obj (Device): Django ORM Device object. - logger (NornirLogger): Logger object - global_settings (models.GoldenConfigSetting): Golden Config global settings. - - Returns: - str: The local filesystem working directory corresponding to the repo slug. - """ - match_rule = getattr(global_settings, f"{repo_type}_match_rule") - - if not match_rule: - return getattr(global_settings, f"{repo_type}_repository").first().filesystem_path - - desired_repository_slug = render_jinja_template(obj, logger, match_rule) - matching_repo = getattr(global_settings, f"{repo_type}_repository").filter(slug=desired_repository_slug) - if len(matching_repo) == 1: - return f"{settings.GIT_ROOT}/{matching_repo[0].slug}" - logger.log_failure( - obj, - f"There is no repository slug matching '{desired_repository_slug}' for device. 
Verify the matching rule and configured Git repositories.", - ) - return None + return device_to_settings_map diff --git a/nautobot_golden_config/views.py b/nautobot_golden_config/views.py index c309bd79..607455ce 100644 --- a/nautobot_golden_config/views.py +++ b/nautobot_golden_config/views.py @@ -1,4 +1,4 @@ -"""Django views for Nautobot Golden Configuration.""" +"""Django views for Nautobot Golden Configuration.""" # pylint: disable=too-many-lines import base64 import difflib import io @@ -30,6 +30,7 @@ from nautobot_golden_config import filters, forms, models, tables from nautobot_golden_config.utilities.constant import CONFIG_FEATURES, ENABLE_COMPLIANCE, PLUGIN_CFG from nautobot_golden_config.utilities.graphql import graph_ql_query +from nautobot_golden_config.utilities.helper import get_device_to_settings_map LOGGER = logging.getLogger(__name__) @@ -57,7 +58,11 @@ def extra_context(self): def alter_queryset(self, request): """Build actual runtime queryset as the build time queryset provides no information.""" - return self.queryset.filter(id__in=models.GoldenConfigSetting.objects.first().get_queryset()) + qs = Device.objects.none() + for obj in models.GoldenConfigSetting.objects.all(): + qs = qs | obj.get_queryset().distinct() + + return self.queryset.filter(id__in=qs) def queryset_to_csv(self): """Override nautobot default to account for using Device model for GoldenConfig data.""" @@ -391,8 +396,8 @@ def diff_structured_data(backup_data, intended_data): if request.GET.get("format") in ["json", "yaml"]: structure_format = request.GET.get("format") - global_settings = models.GoldenConfigSetting.objects.first() - _, output = graph_ql_query(request, device, global_settings.sot_agg_query) + settings = get_device_to_settings_map(queryset=Device.objects.filter(pk=device.pk))[device] + _, output = graph_ql_query(request, device, settings.sot_agg_query) if structure_format == "yaml": output = yaml.dump(json.loads(json.dumps(output)), default_flow_style=False) @@ -824,41 +829,55 @@ class ComplianceRuleBulkEditView(generic.BulkEditView): # # GoldenConfigSetting # +class GoldenConfigSettingView(generic.ObjectView): + """View for single GoldenConfigSetting instance.""" + queryset = models.GoldenConfigSetting.objects.all() -class GoldenConfigSettingView(generic.ObjectView): - """View for the only GoldenConfigSetting instance.""" + # def get_extra_context(self, request, instance): + # """Add extra data to detail view for Nautobot.""" + # return {} + +class GoldenConfigSettingCreateView(generic.ObjectEditView): + """Create view.""" + + model = models.GoldenConfigSetting queryset = models.GoldenConfigSetting.objects.all() + model_form = forms.GoldenConfigSettingFeatureForm + template_name = "nautobot_golden_config/goldenconfigsetting_edit.html" - def get(self, request, *args, **kwargs): - """Override the get parameter to get the first instance to enforce singleton pattern.""" - instance = self.queryset.first() - return render( - request, - self.get_template_name(), - { - "object": instance, - **self.get_extra_context(request, instance), - }, - ) +class GoldenConfigSettingDeleteView(generic.ObjectDeleteView): + """Delete view.""" - def get_extra_context(self, request, instance): - """Add extra data to detail view for Nautobot.""" - return {} + model = models.GoldenConfigSetting + queryset = models.GoldenConfigSetting.objects.all() + + +class GoldenConfigSettingBulkDeleteView(generic.BulkDeleteView): + """Delete view.""" + + queryset = models.GoldenConfigSetting.objects.all() + table = 
tables.GoldenConfigSettingTable class GoldenConfigSettingEditView(generic.ObjectEditView): - """View for editing the Global configurations.""" + """Edit view.""" + model = models.GoldenConfigSetting queryset = models.GoldenConfigSetting.objects.all() model_form = forms.GoldenConfigSettingFeatureForm - default_return_url = "plugins:nautobot_golden_config:goldenconfigsetting" + template_name = "nautobot_golden_config/goldenconfigsetting_edit.html" + + +class GoldenConfigSettingListView(generic.ObjectListView): + """List view.""" - def get_object(self, kwargs): - """Override method to get first object to enforce the singleton pattern.""" - return self.queryset.first() + queryset = models.GoldenConfigSetting.objects.all() + table = tables.GoldenConfigSettingTable + # TODO: Get import working + action_buttons = ("add", "export") # From f937acb7092cc38baf90a59e1e9481262af58214 Mon Sep 17 00:00:00 2001 From: Uros Bajzelj Date: Mon, 28 Feb 2022 21:22:56 +0100 Subject: [PATCH 24/36] Update ci.yml Update credentials for GH publishing --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0180f784..38d01d39 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -138,7 +138,7 @@ jobs: - name: "Upload binaries to release" uses: "svenstaro/upload-release-action@v2" with: - repo_token: "${{ secrets.NTC_GITHUB_TOKEN }}" + repo_token: "${{ secrets.GH_NAUTOBOT_BOT_TOKEN }}" file: "dist/*" tag: "${{ github.ref }}" overwrite: true From dc6340ae72ba217ab861013bf689e7e63df06b78 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Wed, 2 Mar 2022 13:01:40 +0000 Subject: [PATCH 25/36] Retrieve device by id in dict returned by get_device_to_settings_map() --- nautobot_golden_config/api/views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_golden_config/api/views.py b/nautobot_golden_config/api/views.py index 6604c3ac..48e5bd4c 100644 --- a/nautobot_golden_config/api/views.py +++ b/nautobot_golden_config/api/views.py @@ -31,7 +31,7 @@ class SOTAggDeviceDetailView(APIView): def get(self, request, *args, **kwargs): """Get method serialize for a dictionary to json response.""" device = Device.objects.get(pk=kwargs["pk"]) - settings = get_device_to_settings_map(queryset=Device.objects.filter(pk=device.pk))[device] + settings = get_device_to_settings_map(queryset=Device.objects.filter(pk=device.pk))[device.id] status_code, data = graph_ql_query(request, device, settings.sot_agg_query) data = json.loads(json.dumps(data)) return Response(serializers.GraphQLSerializer(data=data).initial_data, status=status_code) From 4a367ad78278927e8c18d555652af5d2b4e1f6a9 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 3 Mar 2022 14:07:32 +0000 Subject: [PATCH 26/36] Fix config_intended nornir task --- nautobot_golden_config/nornir_plays/config_intended.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_golden_config/nornir_plays/config_intended.py b/nautobot_golden_config/nornir_plays/config_intended.py index 2ee3663a..e33f360f 100644 --- a/nautobot_golden_config/nornir_plays/config_intended.py +++ b/nautobot_golden_config/nornir_plays/config_intended.py @@ -78,7 +78,7 @@ def run_template( # pylint: disable=too-many-arguments obj=obj, logger=logger, jinja_template=jinja_template, - jinja_root_path=settings.jinja_repository.path, + jinja_root_path=settings.jinja_repository.filesystem_path, output_file_location=output_file_location, 
default_drivers_mapping=get_dispatcher(), jinja_filters=jinja_env.filters, From b76edf2c1e1fe3df809664c62aa0ddde81ed0941 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 3 Mar 2022 15:18:35 +0000 Subject: [PATCH 27/36] Fix key reference in sotagg pop up code --- nautobot_golden_config/views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_golden_config/views.py b/nautobot_golden_config/views.py index 607455ce..c17714af 100644 --- a/nautobot_golden_config/views.py +++ b/nautobot_golden_config/views.py @@ -396,7 +396,7 @@ def diff_structured_data(backup_data, intended_data): if request.GET.get("format") in ["json", "yaml"]: structure_format = request.GET.get("format") - settings = get_device_to_settings_map(queryset=Device.objects.filter(pk=device.pk))[device] + settings = get_device_to_settings_map(queryset=Device.objects.filter(pk=device.pk))[device.id] _, output = graph_ql_query(request, device, settings.sot_agg_query) if structure_format == "yaml": From 44430319795273bcc6572075209eb31174db806b Mon Sep 17 00:00:00 2001 From: Jeff Kala <48843785+jeffkala@users.noreply.github.com> Date: Wed, 16 Mar 2022 23:44:42 -0500 Subject: [PATCH 28/36] fix to support secrets group on the git repos (#226) * fix to support secrets group on the git repos fixes #224 --- docs/navigating-golden.md | 6 +++-- docs/quick-start.md | 2 ++ .../tests/test_utilities/test_git.py | 4 +++- nautobot_golden_config/utilities/git.py | 24 +++++++++++++++---- nautobot_golden_config/utilities/utils.py | 23 ++++++++++++++++++ 5 files changed, 51 insertions(+), 8 deletions(-) diff --git a/docs/navigating-golden.md b/docs/navigating-golden.md index bff8621e..061fc46d 100644 --- a/docs/navigating-golden.md +++ b/docs/navigating-golden.md @@ -39,7 +39,7 @@ Each Job attempts to provide sane error handling, and respects the `debug` flag The golden configuration plugin settings can be found by navigating to `Plugins -> Settings` button. Select one of the Settings, under the `Golden Configuration` section. Since Golden Configuration Plugin version 1.0, the plugin allows for multiple settings to be configured by the User. Each of the settings, has the individual repositories and configuration details, as well as the scope. -You could use a combination of settings to customize Your Configuration Compliance behaviour. +You could use a combination of settings to customize Your Configuration Compliance behavior. Settings have a name and a weight. The weight parameter indicates the priority of given Settings - the higher the weight, the device matching the scope defined will be assigned to the scope. At the same moment, each device will be matched up to maximum of only one `Settings.` In case of the same weight, the sorting is performed by the name. @@ -123,6 +123,8 @@ Parameters: |Provides|Valid providers for Git Repo.|
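Under the hood, the plugin resolves the username and token from the attached Secrets Group using the HTTP(S) access type, which is why the note below calls that type out. A minimal sketch, assuming the `get_secret_value` helper added later in this patch (the wrapper function name here is illustrative only, not part of the plugin):

```python
from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices
from nautobot.extras.models.secrets import SecretsGroupAssociation


def resolve_git_credentials(git_repo):
    """Illustrative helper: return (username, token) for a GitRepository with a Secrets Group attached."""
    resolved = []
    for secret_type in (SecretsGroupSecretTypeChoices.TYPE_USERNAME, SecretsGroupSecretTypeChoices.TYPE_TOKEN):
        try:
            resolved.append(
                git_repo.secrets_group.get_secret_value(
                    access_type=SecretsGroupAccessTypeChoices.TYPE_HTTP,  # HTTP(S) access type is required
                    secret_type=secret_type,
                    obj=git_repo,
                )
            )
        except SecretsGroupAssociation.DoesNotExist:
            resolved.append(None)  # no secret of this type associated; fall back to None
    return tuple(resolved)
```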
+> Note: If Secret Group is used for the Repositories the secrets type HTTP(S) is required for this plugin. + ![Example Git Backups](./img/backup-git-step2.png) Select `backup configs` and click on `Create`. @@ -151,7 +153,7 @@ The plugin makes use of template content `right_page` in order to use display in ## API -To run the job programmactially, reference the [nautobot documentation](https://nautobot.readthedocs.io/en/stable/additional-features/jobs/#via-the-api) for the proper API call. Pay special attention to the `class_path` defintion. +To run the job programmactially, reference the [nautobot documentation](https://nautobot.readthedocs.io/en/stable/additional-features/jobs/#via-the-api) for the proper API call. Pay special attention to the `class_path` definition. ## Feature Enablement diff --git a/docs/quick-start.md b/docs/quick-start.md index 11b2e735..8f101f49 100644 --- a/docs/quick-start.md +++ b/docs/quick-start.md @@ -48,6 +48,8 @@ Follow the steps below to get up and running for the intended configuration elem 1. Enable the feature in the `PLUGIN_SETTINGS`. The configuration should have `"enable_intended": True` set in the `PLUGINS_CONFIG` dictionary for `nautobot_golden_config`. +> Notice: If Secret Group is used for the Repositories the secrets type HTTP(S) is required for this plugin. + 2. Add any git repositories that will be used to house the intended configurations. 1. In the UI `Extensibility -> Git Repositories`. Click Add. diff --git a/nautobot_golden_config/tests/test_utilities/test_git.py b/nautobot_golden_config/tests/test_utilities/test_git.py index cc7b9e1f..d4a9cc8e 100644 --- a/nautobot_golden_config/tests/test_utilities/test_git.py +++ b/nautobot_golden_config/tests/test_utilities/test_git.py @@ -1,7 +1,8 @@ """Unit tests for nautobot_golden_config utilities git.""" import unittest -from unittest.mock import patch, Mock +from unittest.mock import Mock, patch + from nautobot_golden_config.utilities.git import GitRepo @@ -15,6 +16,7 @@ def setUp(self): mock_obj.remote_url = "/fake/remote" mock_obj._token = "fake token" # pylint: disable=protected-access mock_obj.username = None + mock_obj.secrets_group = None self.mock_obj = mock_obj @patch("nautobot_golden_config.utilities.git.Repo", autospec=True) diff --git a/nautobot_golden_config/utilities/git.py b/nautobot_golden_config/utilities/git.py index 556c7853..6182cc73 100644 --- a/nautobot_golden_config/utilities/git.py +++ b/nautobot_golden_config/utilities/git.py @@ -1,17 +1,27 @@ """Git helper methods and class.""" +import logging import os import re -import logging - from urllib.parse import quote + from git import Repo +from nautobot.extras.choices import SecretsGroupSecretTypeChoices +from nautobot_golden_config.utilities.utils import get_secret_value + LOGGER = logging.getLogger(__name__) -class GitRepo: +def _get_secrets(git_obj): + """Get Secrets Information from Associated Git Secrets Group.""" + user_token = get_secret_value(secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME, git_obj=git_obj) + token = get_secret_value(secret_type=SecretsGroupSecretTypeChoices.TYPE_TOKEN, git_obj=git_obj) + return (user_token, token) + + +class GitRepo: # pylint: disable=too-many-instance-attributes """Git Repo object to help with git actions.""" def __init__(self, obj): @@ -22,8 +32,12 @@ def __init__(self, obj): """ self.path = obj.filesystem_path self.url = obj.remote_url - self.token = obj._token - self.token_user = obj.username + self.secrets_group = obj.secrets_group + if self.secrets_group: + 
self.token_user, self.token = _get_secrets(obj) + else: + self.token = obj._token + self.token_user = obj.username if self.token and self.token not in self.url: # Some Git Providers require a user as well as a token. if self.token_user: diff --git a/nautobot_golden_config/utilities/utils.py b/nautobot_golden_config/utilities/utils.py index a3df5500..4c54b267 100644 --- a/nautobot_golden_config/utilities/utils.py +++ b/nautobot_golden_config/utilities/utils.py @@ -1,5 +1,7 @@ """Utility functions.""" +from nautobot.extras.choices import SecretsGroupAccessTypeChoices +from nautobot.extras.models.secrets import SecretsGroupAssociation from nautobot_golden_config.utilities.constant import PLUGIN_CFG @@ -8,3 +10,24 @@ def get_platform(platform): if not PLUGIN_CFG.get("platform_slug_map"): return platform return PLUGIN_CFG.get("platform_slug_map").get(platform, platform) + + +def get_secret_value(secret_type, git_obj): + """Get value for a secret based on secret type and device. + + Args: + secret_type (SecretsGroupSecretTypeChoices): Type of secret to check. + git_obj (extras.GitRepository): Nautobot git object. + + Returns: + str: Secret value. + """ + try: + value = git_obj.secrets_group.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_HTTP, + secret_type=secret_type, + obj=git_obj, + ) + except SecretsGroupAssociation.DoesNotExist: + value = None + return value From 08f355ffb1b1bc7f46563f92e199a2745347e829 Mon Sep 17 00:00:00 2001 From: mzb Date: Thu, 17 Mar 2022 05:46:09 +0100 Subject: [PATCH 29/36] Separate data migrations from schema migrations in Multiple GC Settings Migration (#223) --- .../migrations/0009_multiple_gc_settings.py | 123 ------------------ .../0009_multiple_gc_settings_part_1.py | 71 ++++++++++ .../0010_multiple_gc_settings_part_2.py | 30 +++++ .../0011_multiple_gc_settings_part_3.py | 44 +++++++ .../0012_multiple_gc_settings_part_4.py | 30 +++++ .../0013_multiple_gc_settings_part_5.py | 19 +++ 6 files changed, 194 insertions(+), 123 deletions(-) delete mode 100644 nautobot_golden_config/migrations/0009_multiple_gc_settings.py create mode 100644 nautobot_golden_config/migrations/0009_multiple_gc_settings_part_1.py create mode 100644 nautobot_golden_config/migrations/0010_multiple_gc_settings_part_2.py create mode 100644 nautobot_golden_config/migrations/0011_multiple_gc_settings_part_3.py create mode 100644 nautobot_golden_config/migrations/0012_multiple_gc_settings_part_4.py create mode 100644 nautobot_golden_config/migrations/0013_multiple_gc_settings_part_5.py diff --git a/nautobot_golden_config/migrations/0009_multiple_gc_settings.py b/nautobot_golden_config/migrations/0009_multiple_gc_settings.py deleted file mode 100644 index 703371a9..00000000 --- a/nautobot_golden_config/migrations/0009_multiple_gc_settings.py +++ /dev/null @@ -1,123 +0,0 @@ -# Generated by Django 3.1.14 on 2022-02-04 09:52 - -from django.db import migrations, models -import django.db.models.deletion - - -def convert_many_repos_part1(apps, schema_editor): - """ - Add the current `backup_repository` and `intended_repository` objects values - to the `FK` additional intermediary attritbute to retain data.` - """ - GoldenConfigSetting = apps.get_model("nautobot_golden_config", "GoldenConfigSetting") - - settings_obj = GoldenConfigSetting.objects.first() - - if settings_obj.backup_repository: - settings_obj.backup_repository_tmp = settings_obj.backup_repository.first() - settings_obj.save() - - if settings_obj.intended_repository: - settings_obj.intended_repository_tmp = 
settings_obj.intended_repository.first() - settings_obj.save() - - -def convert_many_repos_part2(apps, schema_editor): - """ - Add the current `backup_repository_tmp` and `intended_repository_tmp` object values - to the FKs final attributes to retain data.` - """ - GoldenConfigSetting = apps.get_model("nautobot_golden_config", "GoldenConfigSetting") - - settings_obj = GoldenConfigSetting.objects.first() - - if settings_obj.backup_repository_tmp: - settings_obj.backup_repository = settings_obj.backup_repository_tmp - settings_obj.save() - - if settings_obj.intended_repository_tmp: - settings_obj.intended_repository = settings_obj.intended_repository_tmp - settings_obj.save() - - -class Migration(migrations.Migration): - - dependencies = [ - ('extras', '0018_joblog_data_migration'), - ('nautobot_golden_config', '0008_multi_repo_support_final'), - ] - - operations = [ - migrations.AlterModelOptions( - name='goldenconfigsetting', - options={'ordering': ['-weight', 'name'], 'verbose_name': 'Golden Config Setting'}, - ), - migrations.RemoveField( - model_name='goldenconfigsetting', - name='backup_match_rule', - ), - migrations.RemoveField( - model_name='goldenconfigsetting', - name='intended_match_rule', - ), - migrations.AddField( - model_name='goldenconfigsetting', - name='description', - field=models.CharField(blank=True, max_length=200), - ), - migrations.AddField( - model_name='goldenconfigsetting', - name='name', - field=models.CharField(default='Default Settings', max_length=100, unique=True), - preserve_default=False, - ), - migrations.AddField( - model_name='goldenconfigsetting', - name='slug', - field=models.SlugField(default='default', max_length=100, unique=True), - preserve_default=False, - ), - migrations.AddField( - model_name='goldenconfigsetting', - name='weight', - field=models.PositiveSmallIntegerField(default=1000), - ), - migrations.AddField( - model_name='goldenconfigsetting', - name='backup_repository_tmp', - field=models.ForeignKey(blank=True, limit_choices_to={'provided_contents__contains': 'nautobot_golden_config.backupconfigs'}, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='backup_repository', to='extras.gitrepository'), - ), - migrations.AddField( - model_name='goldenconfigsetting', - name='intended_repository_tmp', - field=models.ForeignKey(blank=True, limit_choices_to={'provided_contents__contains': 'nautobot_golden_config.intendedconfigs'}, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='intended_repository', to='extras.gitrepository'), - ), - migrations.RunPython(convert_many_repos_part1), - migrations.RemoveField( - model_name='goldenconfigsetting', - name='backup_repository', - ), - migrations.RemoveField( - model_name='goldenconfigsetting', - name='intended_repository', - ), - migrations.AddField( - model_name='goldenconfigsetting', - name='backup_repository', - field=models.ForeignKey(blank=True, limit_choices_to={'provided_contents__contains': 'nautobot_golden_config.backupconfigs'}, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='backup_repository', to='extras.gitrepository'), - ), - migrations.AddField( - model_name='goldenconfigsetting', - name='intended_repository', - field=models.ForeignKey(blank=True, limit_choices_to={'provided_contents__contains': 'nautobot_golden_config.intendedconfigs'}, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='intended_repository', to='extras.gitrepository'), - ), - migrations.RunPython(convert_many_repos_part2), - 
migrations.RemoveField( - model_name='goldenconfigsetting', - name='backup_repository_tmp', - ), - migrations.RemoveField( - model_name='goldenconfigsetting', - name='intended_repository_tmp', - ), - ] diff --git a/nautobot_golden_config/migrations/0009_multiple_gc_settings_part_1.py b/nautobot_golden_config/migrations/0009_multiple_gc_settings_part_1.py new file mode 100644 index 00000000..c47e45b7 --- /dev/null +++ b/nautobot_golden_config/migrations/0009_multiple_gc_settings_part_1.py @@ -0,0 +1,71 @@ +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ("extras", "0018_joblog_data_migration"), + ("nautobot_golden_config", "0008_multi_repo_support_final"), + ] + + operations = [ + migrations.AlterModelOptions( + name="goldenconfigsetting", + options={"ordering": ["-weight", "name"], "verbose_name": "Golden Config Setting"}, + ), + migrations.RemoveField( + model_name="goldenconfigsetting", + name="backup_match_rule", + ), + migrations.RemoveField( + model_name="goldenconfigsetting", + name="intended_match_rule", + ), + migrations.AddField( + model_name="goldenconfigsetting", + name="description", + field=models.CharField(blank=True, max_length=200), + ), + migrations.AddField( + model_name="goldenconfigsetting", + name="name", + field=models.CharField(default="Default Settings", max_length=100, unique=True), + preserve_default=False, + ), + migrations.AddField( + model_name="goldenconfigsetting", + name="slug", + field=models.SlugField(default="default", max_length=100, unique=True), + preserve_default=False, + ), + migrations.AddField( + model_name="goldenconfigsetting", + name="weight", + field=models.PositiveSmallIntegerField(default=1000), + ), + migrations.AddField( + model_name="goldenconfigsetting", + name="backup_repository_tmp", + field=models.ForeignKey( + blank=True, + limit_choices_to={"provided_contents__contains": "nautobot_golden_config.backupconfigs"}, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="backup_repository", + to="extras.gitrepository", + ), + ), + migrations.AddField( + model_name="goldenconfigsetting", + name="intended_repository_tmp", + field=models.ForeignKey( + blank=True, + limit_choices_to={"provided_contents__contains": "nautobot_golden_config.intendedconfigs"}, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="intended_repository", + to="extras.gitrepository", + ), + ), + ] diff --git a/nautobot_golden_config/migrations/0010_multiple_gc_settings_part_2.py b/nautobot_golden_config/migrations/0010_multiple_gc_settings_part_2.py new file mode 100644 index 00000000..95af45f7 --- /dev/null +++ b/nautobot_golden_config/migrations/0010_multiple_gc_settings_part_2.py @@ -0,0 +1,30 @@ +from django.db import migrations + + +def convert_many_repos_part1(apps, schema_editor): + """ + Add the current `backup_repository` and `intended_repository` objects values + to the `FK` additional intermediary attritbute to retain data.` + """ + GoldenConfigSetting = apps.get_model("nautobot_golden_config", "GoldenConfigSetting") + + settings_obj = GoldenConfigSetting.objects.first() + + if settings_obj.backup_repository: + settings_obj.backup_repository_tmp = settings_obj.backup_repository.first() + settings_obj.save() + + if settings_obj.intended_repository: + settings_obj.intended_repository_tmp = settings_obj.intended_repository.first() + settings_obj.save() + + +class Migration(migrations.Migration): + + dependencies = [ + 
("nautobot_golden_config", "0009_multiple_gc_settings_part_1"), + ] + + operations = [ + migrations.RunPython(convert_many_repos_part1), + ] diff --git a/nautobot_golden_config/migrations/0011_multiple_gc_settings_part_3.py b/nautobot_golden_config/migrations/0011_multiple_gc_settings_part_3.py new file mode 100644 index 00000000..f5a1dd3e --- /dev/null +++ b/nautobot_golden_config/migrations/0011_multiple_gc_settings_part_3.py @@ -0,0 +1,44 @@ +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ("nautobot_golden_config", "0010_multiple_gc_settings_part_2"), + ] + + operations = [ + migrations.RemoveField( + model_name="goldenconfigsetting", + name="backup_repository", + ), + migrations.RemoveField( + model_name="goldenconfigsetting", + name="intended_repository", + ), + migrations.AddField( + model_name="goldenconfigsetting", + name="backup_repository", + field=models.ForeignKey( + blank=True, + limit_choices_to={"provided_contents__contains": "nautobot_golden_config.backupconfigs"}, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="backup_repository", + to="extras.gitrepository", + ), + ), + migrations.AddField( + model_name="goldenconfigsetting", + name="intended_repository", + field=models.ForeignKey( + blank=True, + limit_choices_to={"provided_contents__contains": "nautobot_golden_config.intendedconfigs"}, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="intended_repository", + to="extras.gitrepository", + ), + ), + ] diff --git a/nautobot_golden_config/migrations/0012_multiple_gc_settings_part_4.py b/nautobot_golden_config/migrations/0012_multiple_gc_settings_part_4.py new file mode 100644 index 00000000..4c7892d5 --- /dev/null +++ b/nautobot_golden_config/migrations/0012_multiple_gc_settings_part_4.py @@ -0,0 +1,30 @@ +from django.db import migrations + + +def convert_many_repos_part2(apps, schema_editor): + """ + Add the current `backup_repository_tmp` and `intended_repository_tmp` object values + to the FKs final attributes to retain data.` + """ + GoldenConfigSetting = apps.get_model("nautobot_golden_config", "GoldenConfigSetting") + + settings_obj = GoldenConfigSetting.objects.first() + + if settings_obj.backup_repository_tmp: + settings_obj.backup_repository = settings_obj.backup_repository_tmp + settings_obj.save() + + if settings_obj.intended_repository_tmp: + settings_obj.intended_repository = settings_obj.intended_repository_tmp + settings_obj.save() + + +class Migration(migrations.Migration): + + dependencies = [ + ("nautobot_golden_config", "0011_multiple_gc_settings_part_3"), + ] + + operations = [ + migrations.RunPython(convert_many_repos_part2), + ] diff --git a/nautobot_golden_config/migrations/0013_multiple_gc_settings_part_5.py b/nautobot_golden_config/migrations/0013_multiple_gc_settings_part_5.py new file mode 100644 index 00000000..3b7fa52d --- /dev/null +++ b/nautobot_golden_config/migrations/0013_multiple_gc_settings_part_5.py @@ -0,0 +1,19 @@ +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("nautobot_golden_config", "0012_multiple_gc_settings_part_4"), + ] + + operations = [ + migrations.RemoveField( + model_name="goldenconfigsetting", + name="backup_repository_tmp", + ), + migrations.RemoveField( + model_name="goldenconfigsetting", + name="intended_repository_tmp", + ), + ] From 9f92aa6b855cd0cf02e69d5cf674b93c1aa3b695 Mon Sep 17 00:00:00 2001 From: Przemek Rogala 
Date: Thu, 17 Mar 2022 20:54:59 +0000 Subject: [PATCH 30/36] SoTAgg field conversion to FK to GraphQL Saved Query (#218) * Convert SoTAgg Query field to the FK to Saved GraphQL Query object. --- docs/navigating-sot-agg.md | 7 +- docs/quick-start.md | 9 +- nautobot_golden_config/api/views.py | 2 +- .../0014_convert_sotagg_queries_part1.py | 17 +++ .../0015_convert_sotagg_queries_part2.py | 24 +++++ .../0016_convert_sotagg_queries_part3.py | 29 +++++ .../0017_convert_sotagg_queries_part4.py | 49 +++++++++ .../0018_convert_sotagg_queries_part5.py | 15 +++ nautobot_golden_config/models.py | 22 ++-- .../nornir_plays/config_intended.py | 2 +- .../goldenconfigsetting.html | 6 +- nautobot_golden_config/tests/conftest.py | 102 +++++++++++++++++- nautobot_golden_config/tests/test_api.py | 23 ++-- nautobot_golden_config/tests/test_graphql.py | 40 ++----- nautobot_golden_config/tests/test_models.py | 15 +-- nautobot_golden_config/views.py | 2 +- 16 files changed, 286 insertions(+), 78 deletions(-) create mode 100644 nautobot_golden_config/migrations/0014_convert_sotagg_queries_part1.py create mode 100644 nautobot_golden_config/migrations/0015_convert_sotagg_queries_part2.py create mode 100644 nautobot_golden_config/migrations/0016_convert_sotagg_queries_part3.py create mode 100644 nautobot_golden_config/migrations/0017_convert_sotagg_queries_part4.py create mode 100644 nautobot_golden_config/migrations/0018_convert_sotagg_queries_part5.py diff --git a/docs/navigating-sot-agg.md b/docs/navigating-sot-agg.md index 8ef179f4..56b892b5 100644 --- a/docs/navigating-sot-agg.md +++ b/docs/navigating-sot-agg.md @@ -2,7 +2,7 @@ The Source of Truth Aggregation feature uses several key components: -* A single GraphQL query which aggregates device data. +* A GraphQL query, per settings instance, which aggregates device data. * A facility to modify data with a "transposer" function. * Nautobot's config context feature and policy engine. * Nautobot's native git platform. @@ -10,11 +10,12 @@ The Source of Truth Aggregation feature uses several key components: ## GraphQL There is currently support to make an arbitrary GraphQL query that has "device_id" as a variable. It is likely best to use the GraphiQL interface to model -your data, and then save that query to the configuration. The application configuration ensures the following two components. +your data, and then save that query as the Saved Query object. The application configuration ensures the following component. -* The query is a valid GraphQL query. * The query starts with exactly "query ($device_id: ID!)"". This is to help fail fast and help with overall user experience of clear expectations. +> NOTE: The above validation will not happen if the query in the Saved Query object is modified after it's been assigned to the Settings object. That is, validation of the SoTAgg field only happens when the Settings object is created or updated. + Note that the GraphQL query returned is modified to remove the root key of `device`, so instead of all data being within device, such as `{"device": {"site": {"slug": "jcy"}}}`, it is simply `{"site": {"slug": "jcy"}}` as an example. 
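A minimal sketch of the two behaviors described above — the strict prefix check applied when a Settings object is created or updated, and the stripping of the root `device` key from the query result. The helper names are illustrative only; the real check lives in the model's `clean()` method introduced in this patch:

```python
GRAPHQL_STR_START = "query ($device_id: ID!)"


def validate_sot_agg_query(query_text):
    """Fail fast if the saved query does not open with the required signature."""
    if not str(query_text).startswith(GRAPHQL_STR_START):
        raise ValueError(f"The GraphQL query must start with exactly `{GRAPHQL_STR_START}`")


def strip_device_root(data):
    """Drop the root `device` key, e.g. {"device": {"site": ...}} becomes {"site": ...}."""
    return data.get("device", data)


validate_sot_agg_query('query ($device_id: ID!) { device(id: $device_id) { site { slug } } }')
print(strip_device_root({"device": {"site": {"slug": "jcy"}}}))  # -> {"site": {"slug": "jcy"}}
```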
diff --git a/docs/quick-start.md b/docs/quick-start.md index 8f101f49..65115f26 100644 --- a/docs/quick-start.md +++ b/docs/quick-start.md @@ -1,8 +1,10 @@ # Quick Start Guides +- [Quick Start Guides](#quick-start-guides) - [Backup Configuration](#backup-configuration) - [Intended Configuration](#intended-configuration) - [Compliance](#compliance) +- [Load Properties from Git](#load-properties-from-git) # Backup Configuration @@ -76,7 +78,12 @@ Follow the steps below to get up and running for the intended configuration elem 5. Determine what data(variables) the Jinja2 templates need from Nautobot. 1. See [Source of Truth Agg Details](./navigating-sot-agg.md) - 2. Populate the SoTAgg field in the `Plugin -> Settings`. + 2. In the UI `Extensibility -> GraphQL Queries`. Click Add. + 3. Populate the GraphQL data. + 4. Make sure to follow the format specified in the **GraphQL** section in [Source of Truth Agg Details](./navigating-sot-agg.md) + 5. Click Create. + 6. Navigate to `Plugins -> Settings` under the Golden Configuration Section. + 7. Select a SoTAgg Saved Query. (The dropdown will show the GraphQL query that was just created.) 6. Execute the Intended. diff --git a/nautobot_golden_config/api/views.py b/nautobot_golden_config/api/views.py index 48e5bd4c..7b3bc376 100644 --- a/nautobot_golden_config/api/views.py +++ b/nautobot_golden_config/api/views.py @@ -32,7 +32,7 @@ def get(self, request, *args, **kwargs): """Get method serialize for a dictionary to json response.""" device = Device.objects.get(pk=kwargs["pk"]) settings = get_device_to_settings_map(queryset=Device.objects.filter(pk=device.pk))[device.id] - status_code, data = graph_ql_query(request, device, settings.sot_agg_query) + status_code, data = graph_ql_query(request, device, settings.sot_agg_query.query) data = json.loads(json.dumps(data)) return Response(serializers.GraphQLSerializer(data=data).initial_data, status=status_code) diff --git a/nautobot_golden_config/migrations/0014_convert_sotagg_queries_part1.py b/nautobot_golden_config/migrations/0014_convert_sotagg_queries_part1.py new file mode 100644 index 00000000..48317183 --- /dev/null +++ b/nautobot_golden_config/migrations/0014_convert_sotagg_queries_part1.py @@ -0,0 +1,17 @@ +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("extras", "0018_joblog_data_migration"), + ("nautobot_golden_config", "0013_multiple_gc_settings_part_5"), + ] + + operations = [ + migrations.AddField( + model_name="goldenconfigsetting", + name="sot_agg_query_tmp", + field=models.TextField(blank=True), + ), + ] diff --git a/nautobot_golden_config/migrations/0015_convert_sotagg_queries_part2.py b/nautobot_golden_config/migrations/0015_convert_sotagg_queries_part2.py new file mode 100644 index 00000000..03e75928 --- /dev/null +++ b/nautobot_golden_config/migrations/0015_convert_sotagg_queries_part2.py @@ -0,0 +1,24 @@ +from django.db import migrations, models + + +def save_existing_sotagg_queries(apps, schema_editor): + """ + Save to the temp field the current SoTAgg Query strings. 
+ """ + GoldenConfigSetting = apps.get_model("nautobot_golden_config", "GoldenConfigSetting") + + for gc_setting_obj in GoldenConfigSetting.objects.all(): + if gc_setting_obj.sot_agg_query: + gc_setting_obj.sot_agg_query_tmp = gc_setting_obj.sot_agg_query + gc_setting_obj.save() + + +class Migration(migrations.Migration): + + dependencies = [ + ("nautobot_golden_config", "0014_convert_sotagg_queries_part1"), + ] + + operations = [ + migrations.RunPython(save_existing_sotagg_queries), + ] diff --git a/nautobot_golden_config/migrations/0016_convert_sotagg_queries_part3.py b/nautobot_golden_config/migrations/0016_convert_sotagg_queries_part3.py new file mode 100644 index 00000000..5bc66d31 --- /dev/null +++ b/nautobot_golden_config/migrations/0016_convert_sotagg_queries_part3.py @@ -0,0 +1,29 @@ +from datetime import date + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ("nautobot_golden_config", "0015_convert_sotagg_queries_part2"), + ] + + operations = [ + migrations.RemoveField( + model_name="goldenconfigsetting", + name="sot_agg_query", + ), + migrations.AddField( + model_name="goldenconfigsetting", + name="sot_agg_query", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="sot_aggregation", + to="extras.graphqlquery", + ), + ), + ] diff --git a/nautobot_golden_config/migrations/0017_convert_sotagg_queries_part4.py b/nautobot_golden_config/migrations/0017_convert_sotagg_queries_part4.py new file mode 100644 index 00000000..dbb017d0 --- /dev/null +++ b/nautobot_golden_config/migrations/0017_convert_sotagg_queries_part4.py @@ -0,0 +1,49 @@ +from datetime import date +import logging + +from django.core.validators import ValidationError +from django.db import migrations + +logger = logging.getLogger("nautobot") + + +def create_and_link_gql_queries(apps, schema_editor): + """ + Create Saved GraphQL Query objects and link them to SoTAgg Query field. 
+ """ + GoldenConfigSetting = apps.get_model("nautobot_golden_config", "GoldenConfigSetting") + GraphQLQuery = apps.get_model("extras", "GraphQLQuery") + + today = str(date.today()) + + for gc_setting_obj in GoldenConfigSetting.objects.all(): + if gc_setting_obj.sot_agg_query_tmp: + gcsetting_name = gc_setting_obj.name + sotagg_query = gc_setting_obj.sot_agg_query_tmp + if not sotagg_query.strip().startswith("query ($device_id: ID!)"): + msg = f"Could not migrate SoTAgg query for Golden Config Setting '{gcsetting_name}'" + logger.warning(msg) + continue + + gqlsq_name = f"GC {gcsetting_name} - {today}" + gqlsq_obj = GraphQLQuery() + gqlsq_obj.name = gqlsq_name + gqlsq_obj.query = sotagg_query + gqlsq_obj.variables = {"device_id": ""} + gqlsq_obj.save() + + gc_setting_obj.sot_agg_query = gqlsq_obj + gc_setting_obj.save() + msg = f"Migrated SoTAgg query for Golden Config Setting '{gcsetting_name}'" + logger.info(msg) + + +class Migration(migrations.Migration): + + dependencies = [ + ("nautobot_golden_config", "0016_convert_sotagg_queries_part3"), + ] + + operations = [ + migrations.RunPython(create_and_link_gql_queries), + ] diff --git a/nautobot_golden_config/migrations/0018_convert_sotagg_queries_part5.py b/nautobot_golden_config/migrations/0018_convert_sotagg_queries_part5.py new file mode 100644 index 00000000..97d1eaac --- /dev/null +++ b/nautobot_golden_config/migrations/0018_convert_sotagg_queries_part5.py @@ -0,0 +1,15 @@ +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("nautobot_golden_config", "0017_convert_sotagg_queries_part4"), + ] + + operations = [ + migrations.RemoveField( + model_name="goldenconfigsetting", + name="sot_agg_query_tmp", + ), + ] diff --git a/nautobot_golden_config/models.py b/nautobot_golden_config/models.py index dd39978d..51366ecf 100644 --- a/nautobot_golden_config/models.py +++ b/nautobot_golden_config/models.py @@ -8,9 +8,6 @@ from django.core.serializers.json import DjangoJSONEncoder from django.shortcuts import reverse from django.utils.module_loading import import_string -from graphene_django.settings import graphene_settings -from graphql import get_default_backend -from graphql.error import GraphQLSyntaxError from nautobot.dcim.models import Device from nautobot.extras.models import ObjectChange @@ -480,11 +477,12 @@ class GoldenConfigSetting(PrimaryModel): null=True, help_text="API filter in JSON format matching the list of devices for the scope of devices to be considered.", ) - sot_agg_query = models.TextField( - null=False, + sot_agg_query = models.ForeignKey( + to="extras.GraphQLQuery", + on_delete=models.SET_NULL, + null=True, blank=True, - verbose_name="GraphQL Query", - help_text=f"A query starting with `{GRAPHQL_STR_START}` that is used to render the config. 
Please make sure to alias name, see FAQ for more details.", + related_name="sot_aggregation", ) def get_absolute_url(self): # pylint: disable=no-self-use @@ -511,16 +509,8 @@ def clean(self): super().clean() if self.sot_agg_query: - try: - LOGGER.debug("GraphQL - test query: `%s`", str(self.sot_agg_query)) - backend = get_default_backend() - schema = graphene_settings.SCHEMA - backend.document_from_string(schema, str(self.sot_agg_query)) - except GraphQLSyntaxError as err: - raise ValidationError(str(err)) # pylint: disable=raise-missing-from - LOGGER.debug("GraphQL - test query start with: `%s`", GRAPHQL_STR_START) - if not str(self.sot_agg_query).startswith(GRAPHQL_STR_START): + if not str(self.sot_agg_query.query).startswith(GRAPHQL_STR_START): raise ValidationError(f"The GraphQL query must start with exactly `{GRAPHQL_STR_START}`") if self.scope: diff --git a/nautobot_golden_config/nornir_plays/config_intended.py b/nautobot_golden_config/nornir_plays/config_intended.py index e33f360f..f8a1deb9 100644 --- a/nautobot_golden_config/nornir_plays/config_intended.py +++ b/nautobot_golden_config/nornir_plays/config_intended.py @@ -65,7 +65,7 @@ def run_template( # pylint: disable=too-many-arguments output_file_location = os.path.join(intended_directory, intended_path_template_obj) jinja_template = render_jinja_template(obj, logger, settings.jinja_path_template) - status, device_data = graph_ql_query(nautobot_job.request, obj, settings.sot_agg_query) + status, device_data = graph_ql_query(nautobot_job.request, obj, settings.sot_agg_query.query) if status != 200: logger.log_failure(obj, f"The GraphQL query return a status of {str(status)} with error of {str(device_data)}") raise NornirNautobotException() diff --git a/nautobot_golden_config/templates/nautobot_golden_config/goldenconfigsetting.html b/nautobot_golden_config/templates/nautobot_golden_config/goldenconfigsetting.html index 3a022bc3..ca75c6a9 100644 --- a/nautobot_golden_config/templates/nautobot_golden_config/goldenconfigsetting.html +++ b/nautobot_golden_config/templates/nautobot_golden_config/goldenconfigsetting.html @@ -190,7 +190,11 @@

{{ object }}

GraphQL Query
- {{ object.sot_agg_query|render_json }}
+ {% if object.sot_agg_query %} + {{ object.sot_agg_query }} + {% else %} + None + {% endif %} diff --git a/nautobot_golden_config/tests/conftest.py b/nautobot_golden_config/tests/conftest.py index ed2183ed..a1781f94 100644 --- a/nautobot_golden_config/tests/conftest.py +++ b/nautobot_golden_config/tests/conftest.py @@ -1,7 +1,7 @@ """Params for testing.""" from nautobot.dcim.models import Device, Site, Manufacturer, DeviceType, DeviceRole, Rack, RackGroup, Region, Platform from nautobot.tenancy.models import Tenant, TenantGroup -from nautobot.extras.models import Status, GitRepository +from nautobot.extras.models import Status, GitRepository, GraphQLQuery from nautobot.extras.datasources.registry import get_datasource_contents from django.utils.text import slugify @@ -297,3 +297,103 @@ def create_helper_repo(name="foobaz", provides=None): ], ) git_repo.save(trigger_resync=False) + + +def create_saved_queries() -> None: + """ + Create saved GraphQL queries. + """ + variables = {"device_id": ""} + + name = "GC-SoTAgg-Query-1" + query = """query ($device_id: ID!) { + device(id: $device_id) { + name + tenant { + name + } + } + } + """ + saved_query_1 = GraphQLQuery( + name=name, + slug=slugify(name), + variables=variables, + query=query, + ) + saved_query_1.save() + + name = "GC-SoTAgg-Query-2" + query = """query ($device_id: ID!) { + device(id: $device_id) { + config_context + name + site { + name + } + } + } + """ + saved_query_2 = GraphQLQuery( + name=name, + slug=slugify(name), + variables=variables, + query=query, + ) + saved_query_2.save() + + name = "GC-SoTAgg-Query-3" + query = '{devices(name:"ams-edge-01"){id}}' + saved_query_3 = GraphQLQuery( + name=name, + slug=slugify(name), + query=query, + ) + saved_query_3.save() + + name = "GC-SoTAgg-Query-4" + query = """ + query { + compliance_rules { + feature { + name + } + platform { + name + } + description + config_ordered + match_config + } + } + """ + saved_query_4 = GraphQLQuery( + name=name, + slug=slugify(name), + query=query, + ) + saved_query_4.save() + + name = "GC-SoTAgg-Query-5" + query = """ + query { + golden_config_settings { + name + slug + weight + backup_path_template + intended_path_template + jinja_path_template + backup_test_connectivity + sot_agg_query { + name + } + } + } + """ + saved_query_5 = GraphQLQuery( + name=name, + slug=slugify(name), + query=query, + ) + saved_query_5.save() diff --git a/nautobot_golden_config/tests/test_api.py b/nautobot_golden_config/tests/test_api.py index b42c2d7b..e5caa7c4 100644 --- a/nautobot_golden_config/tests/test_api.py +++ b/nautobot_golden_config/tests/test_api.py @@ -6,10 +6,16 @@ from rest_framework import status from nautobot.utilities.testing import APITestCase -from nautobot.extras.models import GitRepository +from nautobot.extras.models import GitRepository, GraphQLQuery from nautobot_golden_config.models import GoldenConfigSetting -from .conftest import create_device, create_feature_rule_json, create_config_compliance, create_git_repos +from .conftest import ( + create_device, + create_feature_rule_json, + create_config_compliance, + create_git_repos, + create_saved_queries, +) User = get_user_model() @@ -93,6 +99,7 @@ def setUp(self): """Create a superuser and token for API calls.""" super().setUp() create_git_repos() + create_saved_queries() self.add_permissions("nautobot_golden_config.add_goldenconfigsetting") self.add_permissions("nautobot_golden_config.change_goldenconfigsetting") self.base_view = 
reverse("plugins-api:nautobot_golden_config-api:goldenconfigsetting-list") @@ -110,7 +117,7 @@ def setUp(self): "jinja_path_template": "templates/{{obj.platform.slug}}/{{obj.platform.slug}}_main.j2", "backup_test_connectivity": False, "scope": {"has_primary_ip": "True"}, - "sot_agg_query": "query ($device_id: ID!) {\r\n device(id: $device_id) {\r\n config_context\r\n device_role {\r\n name\r\n slug\r\n }\r\n hostname: name\r\n platform {\r\n manufacturer {\r\n name\r\n }\r\n name\r\n napalm_driver\r\n slug\r\n }\r\n primary_ip4 {\r\n address\r\n interface {\r\n name\r\n }\r\n id\r\n }\r\n site {\r\n name\r\n region {\r\n name\r\n slug\r\n parent {\r\n name\r\n slug\r\n }\r\n }\r\n slug\r\n }\r\n }\r\n}", + "sot_agg_query": str(GraphQLQuery.objects.get(name="GC-SoTAgg-Query-1").id), "jinja_repository": str(GitRepository.objects.get(name="test-jinja-repo-1").id), "backup_repository": str(GitRepository.objects.get(name="test-backup-repo-1").id), "intended_repository": str(GitRepository.objects.get(name="test-intended-repo-1").id), @@ -140,10 +147,7 @@ def test_golden_config_settings_create_good(self): ) self.assertFalse(response.data["backup_test_connectivity"]) self.assertEqual(response.data["scope"], {"has_primary_ip": "True"}) - self.assertEqual( - response.data["sot_agg_query"], - "query ($device_id: ID!) {\r\n device(id: $device_id) {\r\n config_context\r\n device_role {\r\n name\r\n slug\r\n }\r\n hostname: name\r\n platform {\r\n manufacturer {\r\n name\r\n }\r\n name\r\n napalm_driver\r\n slug\r\n }\r\n primary_ip4 {\r\n address\r\n interface {\r\n name\r\n }\r\n id\r\n }\r\n site {\r\n name\r\n region {\r\n name\r\n slug\r\n parent {\r\n name\r\n slug\r\n }\r\n }\r\n slug\r\n }\r\n }\r\n}", - ) + self.assertEqual(response.data["sot_agg_query"], GraphQLQuery.objects.get(name="GC-SoTAgg-Query-1").id) self.assertEqual(response.data["jinja_repository"], GitRepository.objects.get(name="test-jinja-repo-1").id) self.assertEqual(response.data["backup_repository"], GitRepository.objects.get(name="test-backup-repo-1").id) self.assertEqual( @@ -181,10 +185,7 @@ def test_golden_config_settings_update_good(self): ) self.assertFalse(response.data["backup_test_connectivity"]) self.assertEqual(response.data["scope"], {"has_primary_ip": "True"}) - self.assertEqual( - response.data["sot_agg_query"], - "query ($device_id: ID!) 
{\r\n device(id: $device_id) {\r\n config_context\r\n device_role {\r\n name\r\n slug\r\n }\r\n hostname: name\r\n platform {\r\n manufacturer {\r\n name\r\n }\r\n name\r\n napalm_driver\r\n slug\r\n }\r\n primary_ip4 {\r\n address\r\n interface {\r\n name\r\n }\r\n id\r\n }\r\n site {\r\n name\r\n region {\r\n name\r\n slug\r\n parent {\r\n name\r\n slug\r\n }\r\n }\r\n slug\r\n }\r\n }\r\n}", - ) + self.assertEqual(response.data["sot_agg_query"], GraphQLQuery.objects.get(name="GC-SoTAgg-Query-1").id) self.assertEqual(response.data["jinja_repository"], GitRepository.objects.get(name="test-jinja-repo-1").id) self.assertEqual(response.data["backup_repository"], GitRepository.objects.get(name="test-backup-repo-1").id) self.assertEqual( diff --git a/nautobot_golden_config/tests/test_graphql.py b/nautobot_golden_config/tests/test_graphql.py index 4ff4746f..a7a423c8 100644 --- a/nautobot_golden_config/tests/test_graphql.py +++ b/nautobot_golden_config/tests/test_graphql.py @@ -10,7 +10,7 @@ from graphene_django.settings import graphene_settings from nautobot.dcim.models import Platform, Site, Device, Manufacturer, DeviceRole, DeviceType -from nautobot.extras.models import GitRepository +from nautobot.extras.models import GitRepository, GraphQLQuery from nautobot_golden_config.models import ( ComplianceFeature, @@ -22,6 +22,8 @@ ConfigReplace, ) +from .conftest import create_saved_queries + # Use the proper swappable User model User = get_user_model() @@ -84,6 +86,7 @@ def setUp(self): """Setup request and create test data to validate GraphQL.""" super().setUp() self.user = User.objects.create(username="Super User", is_active=True, is_superuser=True) + create_saved_queries() # Initialize fake request that will be required to execute GraphQL query self.request = RequestFactory().request(SERVER_NAME="WebRequestContext") @@ -131,7 +134,7 @@ def setUp(self): jinja_path_template="{{jinja_path}}", backup_test_connectivity=True, scope={"platform": ["platform1"]}, - sot_agg_query="{test_model}", + sot_agg_query=GraphQLQuery.objects.get(name="GC-SoTAgg-Query-1"), backup_repository=GitRepository.objects.get( provided_contents__contains="nautobot_golden_config.backupconfigs" ), @@ -264,21 +267,7 @@ def test_query_golden_config(self): def test_query_compliance_rule(self): """Test Configuration Compliance Details Model.""" - query = """ - query { - compliance_rules { - feature { - name - } - platform { - name - } - description - config_ordered - match_config - } - } - """ + query = GraphQLQuery.objects.get(name="GC-SoTAgg-Query-4").query response_data = { "compliance_rules": [ { @@ -296,20 +285,7 @@ def test_query_compliance_rule(self): def test_query_golden_config_setting(self): """Test GraphQL Golden Config Settings Model.""" - query = """ - query { - golden_config_settings { - name - slug - weight - backup_path_template - intended_path_template - jinja_path_template - backup_test_connectivity - sot_agg_query - } - } - """ + query = GraphQLQuery.objects.get(name="GC-SoTAgg-Query-5").query response_data = { "golden_config_settings": [ { @@ -320,7 +296,7 @@ def test_query_golden_config_setting(self): "intended_path_template": "test/intended", "jinja_path_template": "{{jinja_path}}", "backup_test_connectivity": True, - "sot_agg_query": "{test_model}", + "sot_agg_query": {"name": "GC-SoTAgg-Query-1"}, } ] } diff --git a/nautobot_golden_config/tests/test_models.py b/nautobot_golden_config/tests/test_models.py index d93cfae9..8b557e17 100644 --- a/nautobot_golden_config/tests/test_models.py +++ 
b/nautobot_golden_config/tests/test_models.py @@ -5,7 +5,7 @@ from django.db.utils import IntegrityError from django.core.exceptions import ValidationError from nautobot.dcim.models import Platform -from nautobot.extras.models import GitRepository +from nautobot.extras.models import GitRepository, GraphQLQuery from nautobot_golden_config.tests.conftest import create_git_repos from nautobot_golden_config.models import ( @@ -15,7 +15,7 @@ ConfigReplace, ) -from .conftest import create_device, create_feature_rule_json, create_config_compliance +from .conftest import create_device, create_feature_rule_json, create_config_compliance, create_saved_queries class ConfigComplianceModelTestCase(TestCase): @@ -89,6 +89,7 @@ class GoldenConfigSettingModelTestCase(TestCase): def setUp(self): """Get the golden config settings with the only allowed id.""" create_git_repos() + create_saved_queries() # Since we enforce a singleton pattern on this model, nuke the auto-created object. GoldenConfigSetting.objects.all().delete() @@ -112,12 +113,6 @@ def test_absolute_url_success(self): url_string = self.global_settings.get_absolute_url() self.assertEqual(url_string, f"/plugins/golden-config/setting/{self.global_settings.slug}/") - def test_bad_graphql_query(self): - """Invalid graphql query.""" - self.global_settings.sot_agg_query = 'devices(name:"ams-edge-01")' - with self.assertRaises(ValidationError): - self.global_settings.clean() - def test_bad_scope(self): """Verify that a bad value in the scope returns the expected error.""" self.global_settings.scope = json_loads('{"has_primary_ip": true, "role": ["Apple"]}') @@ -130,14 +125,14 @@ def test_bad_scope(self): def test_good_graphql_query_invalid_starts_with(self): """Valid graphql query, however invalid in the usage with golden config plugin.""" - self.global_settings.sot_agg_query = '{devices(name:"ams-edge-01"){id}}' + self.global_settings.sot_agg_query = GraphQLQuery.objects.get(name="GC-SoTAgg-Query-3") with self.assertRaises(ValidationError) as error: self.global_settings.clean() self.assertEqual(error.exception.message, "The GraphQL query must start with exactly `query ($device_id: ID!)`") def test_good_graphql_query_validate_starts_with(self): """Ensure clean() method returns None when valid query is sent through.""" - self.global_settings.sot_agg_query = "query ($device_id: ID!) 
{device(id: $device_id) {id}}" + self.global_settings.sot_agg_query = GraphQLQuery.objects.get(name="GC-SoTAgg-Query-1") self.assertEqual(self.global_settings.clean(), None) def test_good_scope(self): diff --git a/nautobot_golden_config/views.py b/nautobot_golden_config/views.py index c17714af..fa4bb098 100644 --- a/nautobot_golden_config/views.py +++ b/nautobot_golden_config/views.py @@ -397,7 +397,7 @@ def diff_structured_data(backup_data, intended_data): structure_format = request.GET.get("format") settings = get_device_to_settings_map(queryset=Device.objects.filter(pk=device.pk))[device.id] - _, output = graph_ql_query(request, device, settings.sot_agg_query) + _, output = graph_ql_query(request, device, settings.sot_agg_query.query) if structure_format == "yaml": output = yaml.dump(json.loads(json.dumps(output)), default_flow_style=False) From 250269113582077f43df9417bf967c630e17a84d Mon Sep 17 00:00:00 2001 From: Ken Celenza Date: Thu, 17 Mar 2022 21:50:36 -0400 Subject: [PATCH 31/36] Change sot_agg_query on_delete to protect (#227) --- .../migrations/0016_convert_sotagg_queries_part3.py | 2 +- nautobot_golden_config/models.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nautobot_golden_config/migrations/0016_convert_sotagg_queries_part3.py b/nautobot_golden_config/migrations/0016_convert_sotagg_queries_part3.py index 5bc66d31..e2195fd8 100644 --- a/nautobot_golden_config/migrations/0016_convert_sotagg_queries_part3.py +++ b/nautobot_golden_config/migrations/0016_convert_sotagg_queries_part3.py @@ -21,7 +21,7 @@ class Migration(migrations.Migration): field=models.ForeignKey( blank=True, null=True, - on_delete=django.db.models.deletion.SET_NULL, + on_delete=django.db.models.deletion.PROTECT, related_name="sot_aggregation", to="extras.graphqlquery", ), diff --git a/nautobot_golden_config/models.py b/nautobot_golden_config/models.py index 51366ecf..5531069c 100644 --- a/nautobot_golden_config/models.py +++ b/nautobot_golden_config/models.py @@ -479,7 +479,7 @@ class GoldenConfigSetting(PrimaryModel): ) sot_agg_query = models.ForeignKey( to="extras.GraphQLQuery", - on_delete=models.SET_NULL, + on_delete=models.PROTECT, null=True, blank=True, related_name="sot_aggregation", From ae19312d950c241059612c4f62779d2d25d96d86 Mon Sep 17 00:00:00 2001 From: Nautobot-Bot <79372327+nautobot-bot@users.noreply.github.com> Date: Fri, 18 Mar 2022 03:30:51 +0100 Subject: [PATCH 32/36] Update dependency mariadb to v10.8 (#211) Co-authored-by: Renovate Bot --- development/docker-compose.mysql.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/development/docker-compose.mysql.yml b/development/docker-compose.mysql.yml index bde8b14e..acfc5a9c 100644 --- a/development/docker-compose.mysql.yml +++ b/development/docker-compose.mysql.yml @@ -1,7 +1,7 @@ --- services: db: - image: "mariadb:10.7" + image: "mariadb:10.8" env_file: - "dev.env" volumes: From 8d2beb2098688e209fd7280a37c706fb018ddd97 Mon Sep 17 00:00:00 2001 From: Ken Celenza Date: Thu, 17 Mar 2022 23:08:18 -0400 Subject: [PATCH 33/36] prep 1.0.0-beta release (#190) * prep 1.0.0-beta release --- CHANGELOG.md | 30 +++++++++++++++++++++++++++--- README.md | 5 +++-- nautobot_golden_config/__init__.py | 4 +++- nautobot_golden_config/views.py | 10 +--------- poetry.lock | 8 ++++---- pyproject.toml | 3 ++- 6 files changed, 40 insertions(+), 20 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2bd48518..1b35f7fd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,13 +1,37 @@ # Changelog -## v1.0.0 - 
2022-XX-XX +## v1.0.0-beta - 2022-22 + +### Announcements + +- Nautobot Golden Config 1.0.X will officially only support versions 1.2.0 - 1.3.99 ### Added -- Added utility function to determine the local filesystem path which stores the backup and intended repository files for a given device. +- #180 Added Renovate for proactive package management +- #158 Allow for Jinja2 Filters to be used by GoldenConfig templates +- #225 Added support for nautobot secrets group on git repos +- #205 Added support for multiple golden config settings +- #206 Add Git datasource to load GC properties +- #218 Added ability to storre SoTAgg field leveraging Nautobot saved GraphQl query ### Changed -- [#205](https://github.com/nautobot/nautobot-plugin-golden-config/pull/205) - Multiple Golden Config Settings allows for multiple instances of the plugin settings + +- #171 Changed the release policy +- #158 Changed variable job_result to nautobot_job +- #186 Update mariadb Docker tag to v10.7 +- #187 Update postgres Docker tag to v14 +- #188 Update Markdown dependency +- #190 Update to Nautobot 1.2.0 +- #190 Remove Nautobot 1.0 specific code +- #211 Update dependency mariadb to v10.8 + +### Fixed + +- #176 Fixed Pylint issue +- #182 Add reference to Nornir plugin for installation +- #183 Fixed documentation for sot_agg_transposer default +- #184 Fix markdown links in quick-start ## v0.9.10 - 2021-11 diff --git a/README.md b/README.md index 44c11bc0..518fc1a3 100644 --- a/README.md +++ b/README.md @@ -110,8 +110,9 @@ for any security enhancements or major bugs will be supported for a limited time | Golden Config Version | Nautobot First Support Version | Nautobot Last Support Version | | --------------------- | ------------------------------ | ----------------------------- | -| 0.9.X | 1.0 | 1.2 [Official] | -| 1.0.X | 1.2 | 1.2 [Tentative] | +| 0.9.X | 1.0.0 | 1.2.99 [Official] | +| 0.10.X | 1.0.0 | 1.2.99 [Official] | +| 1.0.X | 1.2.0 | 1.3.99 [Official] | ## CLI Helper Commands diff --git a/nautobot_golden_config/__init__.py b/nautobot_golden_config/__init__.py index c39d5b48..5bdf1a70 100644 --- a/nautobot_golden_config/__init__.py +++ b/nautobot_golden_config/__init__.py @@ -1,6 +1,6 @@ """Plugin declaration for nautobot_golden_config.""" -__version__ = "1.0.0-beta" +__version__ = "1.0.0-beta.1" from nautobot.extras.plugins import PluginConfig @@ -15,6 +15,8 @@ class GoldenConfig(PluginConfig): author_email = "opensource@networktocode.com" description = "A plugin for managing Golden Configurations." 
base_url = "golden-config" + min_version = "1.2.0" + max_version = "1.3.99" default_settings = { "enable_backup": True, "enable_compliance": True, diff --git a/nautobot_golden_config/views.py b/nautobot_golden_config/views.py index fa4bb098..6ad3ed8b 100644 --- a/nautobot_golden_config/views.py +++ b/nautobot_golden_config/views.py @@ -8,7 +8,6 @@ from datetime import datetime, timezone import matplotlib.pyplot as plt -import nautobot import numpy as np import yaml from django.contrib import messages @@ -25,7 +24,6 @@ from nautobot.utilities.forms import ConfirmationForm from nautobot.utilities.utils import csv_format from nautobot.utilities.views import ContentTypePermissionRequiredMixin -from packaging.version import Version from nautobot_golden_config import filters, forms, models, tables from nautobot_golden_config.utilities.constant import CONFIG_FEATURES, ENABLE_COMPLIANCE, PLUGIN_CFG @@ -36,7 +34,6 @@ GREEN = "#D5E8D4" RED = "#F8CECC" -NAUTOBOT_VERSION = Version(nautobot.__version__) # # GoldenConfig @@ -470,11 +467,6 @@ def diff_structured_data(backup_data, intended_data): template_name = "nautobot_golden_config/configcompliancedetails.html" if request.GET.get("modal") == "true": template_name = "nautobot_golden_config/configcompliancedetails_modal.html" - include_file = "extras/inc/json_format.html" - - # Nautobot core update template name, for backwards compat - if NAUTOBOT_VERSION < Version("1.1"): - include_file = "extras/inc/configcontext_format.html" return render( request, @@ -485,7 +477,7 @@ def diff_structured_data(backup_data, intended_data): "config_type": config_type, "format": structure_format, "device": device, - "include_file": include_file, + "include_file": "extras/inc/json_format.html", }, ) diff --git a/poetry.lock b/poetry.lock index 952aa9f4..29dd5fc9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -227,7 +227,7 @@ unicode_backport = ["unicodedata2"] [[package]] name = "ciscoconfparse" -version = "1.6.10" +version = "1.6.11" description = "Parse, Audit, Query, Build, and Modify Cisco IOS-style and JunOS-style configurations" category = "main" optional = false @@ -2193,7 +2193,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytes [metadata] lock-version = "1.1" python-versions = "^3.6" -content-hash = "d4ecd6f04c9ca5f7751e58b5045a69ef4fad75e2816ea0230f6b668d667e7af9" +content-hash = "63c416d9e1d53f0e0f1fa1753f5e72e50b7d4f42b6af4f0ea7a19b5e54e78120" [metadata.files] aiocontextvars = [ @@ -2310,8 +2310,8 @@ charset-normalizer = [ {file = "charset_normalizer-2.0.9-py3-none-any.whl", hash = "sha256:1eecaa09422db5be9e29d7fc65664e6c33bd06f9ced7838578ba40d58bdf3721"}, ] ciscoconfparse = [ - {file = "ciscoconfparse-1.6.10-py3-none-any.whl", hash = "sha256:2e7c5a3c9b0b12df9825c850ff78a8a3b7d1adda8c62f3ac1b6aa0746f97ec23"}, - {file = "ciscoconfparse-1.6.10.tar.gz", hash = "sha256:aa0329e240013b8a72cff8f1c749e565ed00554f64dc0bf20889054aea0ba640"}, + {file = "ciscoconfparse-1.6.11-py3-none-any.whl", hash = "sha256:d136f1b9f842e666fd2eb33f9452111cf83b330912b7325e0701d1bec3bedee8"}, + {file = "ciscoconfparse-1.6.11.tar.gz", hash = "sha256:e1cb71784c6f30e6de269025e61ec0c9a6e86291c7423dc3d613da7ba608e4b3"}, ] click = [ {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, diff --git a/pyproject.toml b/pyproject.toml index 8395dbb6..48906ece 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nautobot-golden-config" -version = "1.0.0-beta" 
+version = "1.0.0-beta.1" description = "A plugin for configuration on nautobot" authors = ["Network to Code, LLC", ""] @@ -35,6 +35,7 @@ deepdiff = "^5.5.0" django-pivot = "^1.8.1" matplotlib = "^3.3.2" nautobot-plugin-nornir = ">=0.9.7" +nautobot = ">=1.2.0" [tool.poetry.dev-dependencies] bandit = "*" From 023515de9d2669c82264c19fe7fc8909b6e3890a Mon Sep 17 00:00:00 2001 From: itdependsnetworks Date: Thu, 17 Mar 2022 23:35:43 -0400 Subject: [PATCH 34/36] fix permissions for replace in navigation --- nautobot_golden_config/navigation.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nautobot_golden_config/navigation.py b/nautobot_golden_config/navigation.py index 5f668fc9..c867aa49 100644 --- a/nautobot_golden_config/navigation.py +++ b/nautobot_golden_config/navigation.py @@ -75,14 +75,14 @@ PluginMenuItem( link="plugins:nautobot_golden_config:configreplace_list", link_text="Config Replacements", - permissions=["nautobot_golden_config.view_compliancereplace"], + permissions=["nautobot_golden_config.view_configreplace"], buttons=( PluginMenuButton( link="plugins:nautobot_golden_config:configreplace_add", title="Config Replace", icon_class="mdi mdi-plus-thick", color=ButtonColorChoices.GREEN, - permissions=["nautobot_golden_config.add_compliancereplace"], + permissions=["nautobot_golden_config.add_configreplace"], ), ), ) From 7abf8430459debc3e9d6d7e7c3b31e487f1e847e Mon Sep 17 00:00:00 2001 From: itdependsnetworks Date: Sun, 20 Mar 2022 21:04:13 -0400 Subject: [PATCH 35/36] Move navigation, fix #165, update changelog --- CHANGELOG.md | 8 +- docs/navigating-backup.md | 4 +- docs/navigating-compliance-json.md | 2 +- docs/navigating-compliance.md | 9 +- docs/navigating-golden.md | 2 +- docs/navigating-intended.md | 2 +- docs/quick-start.md | 16 ++-- nautobot_golden_config/forms.py | 4 +- nautobot_golden_config/navigation.py | 82 +++++++++++-------- .../compliance_report.html | 10 --- 10 files changed, 74 insertions(+), 65 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1b35f7fd..1747d1fc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,10 +10,11 @@ - #180 Added Renovate for proactive package management - #158 Allow for Jinja2 Filters to be used by GoldenConfig templates -- #225 Added support for nautobot secrets group on git repos -- #205 Added support for multiple golden config settings +- #167 Added support for multiple repos +- #205 Added support for multiple repos via multiple golden config settings - #206 Add Git datasource to load GC properties - #218 Added ability to storre SoTAgg field leveraging Nautobot saved GraphQl query +- #225 Added support for nautobot secrets group on git repos ### Changed @@ -25,6 +26,7 @@ - #190 Update to Nautobot 1.2.0 - #190 Remove Nautobot 1.0 specific code - #211 Update dependency mariadb to v10.8 +- #229 Updated navigation to a dedicated top level menu ### Fixed @@ -32,6 +34,8 @@ - #182 Add reference to Nornir plugin for installation - #183 Fixed documentation for sot_agg_transposer default - #184 Fix markdown links in quick-start +- #194 Detailed Error Handling in get_job_filter helper +- #229 Fixed #165, Configuration Compliance List View "Device" filter doesn't work ## v0.9.10 - 2021-11 diff --git a/docs/navigating-backup.md b/docs/navigating-backup.md index 06e31a36..29a66988 100644 --- a/docs/navigating-backup.md +++ b/docs/navigating-backup.md @@ -55,7 +55,7 @@ complicated use cases, please refer to the plugin documentation linked above. To start a backup job manually: -1. 
+1. Navigate to the Plugin Home (Golden Config->Home), with Home being in the `Golden Configuration` section
 2. Select _Execute_ on the upper right buttons, then _Backup_
 3. Fill in the data that you wish to have backed up
 4. Select _Run Job_
@@ -65,7 +65,7 @@ To start a backup job manually:

 The line removals settings is a series of regex patterns to identify lines that should be removed. This is helpful as there are usually parts of the configurations that will change each time. A match simply means to remove.

-In order to specify line removals. Navigate to **Plugins -> Config Removals**. Click the **Add** button and fill out the details.
+In order to specify line removals, navigate to **Golden Config -> Config Removals**. Click the **Add** button and fill out the details.
 The remove setting is based on `Platform`. An example is shown below.

 ![Config Removals View](./img/00-navigating-backup.png)

diff --git a/docs/navigating-compliance-json.md b/docs/navigating-compliance-json.md
index 6ddf7b8a..ecfc9687 100644
--- a/docs/navigating-compliance-json.md
+++ b/docs/navigating-compliance-json.md
@@ -54,7 +54,7 @@ Once the API call is made the response data provides a quick snapshot.

 You can also see the compliance data in the UI once it is created via API.

-In the navigation menu: `Plugins -> Configuration Compliance`.
+In the navigation menu: `Golden Config -> Configuration Compliance`.

 ![Example Compliance Run in UI](./img/03-navigating-compliance-json.png)

diff --git a/docs/navigating-compliance.md b/docs/navigating-compliance.md
index 041eeecf..e95368bd 100644
--- a/docs/navigating-compliance.md
+++ b/docs/navigating-compliance.md
@@ -33,7 +33,7 @@ In order to generate the intended configurations, a minimum of two repositories

 To start a compliance job manually:

-1. Navigate to the Plugin Home (Plugins->Home), with Home being in the `Golden Configuration` section
+1. Navigate to `Golden Config->Home`, with Home being in the `Golden Configuration` section
 2. Select _Execute_ on the upper right buttons, then _Compliance_
 3. Fill in the data that you wish to have a compliance report generated for
 4. Select _Run Job_
@@ -112,7 +112,7 @@ router bgp 65250

 Configuration compliance requires the Git Repo settings for `config backups` and `intended configs`--which are covered in their respective sections--regardless if they are actually managed via the plugin or not. The same is true for the `Backup Path` and `Intended Path`.

-The Configuration compliance rule map must be created per the operator/user. You can find these configurations via `Plugins -> Compliance Rules`
+The Configuration compliance rule map must be created per the operator/user. You can find these configurations via `Golden Config -> Compliance Rules`
 links, which brings up the specific configurations.

 ![Configuration Rule](./img/compliance-rule.png)

@@ -170,8 +170,9 @@ There is a global overview or executive summary that provides a high level snaps

 ## Detail Report

-This can be accessed via the Plugins drop-down via `Compliance` details button. From there you can filter the devices via the form on the right side, limit the columns with the `Configure` button, or
-bulk delete with the `Delete` button. Additionally each device is click-able to view the details of that individual device.
+You can view the details from the `Compliance` details button within the `Configuration Compliance` table. From there you can filter the devices via the
+form on the right side, limit the columns with the `Configure` button, or bulk delete with the `Delete` button. Additionally each device is click-able to view
+the details of that individual device.

 You can configure the columns to limit how much is showing on one screen.

diff --git a/docs/navigating-golden.md b/docs/navigating-golden.md
index 061fc46d..0a124957 100644
--- a/docs/navigating-golden.md
+++ b/docs/navigating-golden.md
@@ -36,7 +36,7 @@ Each Job attempts to provide sane error handling, and respects the `debug` flag

 ## Application Settings

-The golden configuration plugin settings can be found by navigating to `Plugins -> Settings` button. Select one of the Settings, under the `Golden Configuration` section.
+The golden configuration plugin settings can be found by navigating to `Golden Config -> Settings`. Select one of the Settings under the `Golden Configuration` section.

 Since Golden Configuration Plugin version 1.0, the plugin allows for multiple settings to be configured by the User. Each of the settings, has the individual repositories and configuration details, as well as the scope.
 You could use a combination of settings to customize Your Configuration Compliance behavior.

diff --git a/docs/navigating-intended.md b/docs/navigating-intended.md
index 7a02790b..5a02dcae 100644
--- a/docs/navigating-intended.md
+++ b/docs/navigating-intended.md
@@ -83,7 +83,7 @@ from custom_jinja_filters import config_templates

 To start a intended configuration job manually:

-1. Navigate to the Plugin Home (Plugins->Home), with Home being in the `Golden Configuration` section
+1. Navigate to `Golden Config -> Home`, with Home being in the `Golden Configuration` section
 2. Select _Execute_ on the upper right buttons, then _Intended_
 3. Fill in the data that you wish to have configurations generated for up
 4. Select _Run Job_

diff --git a/docs/quick-start.md b/docs/quick-start.md
index 65115f26..ac2436ba 100644
--- a/docs/quick-start.md
+++ b/docs/quick-start.md
@@ -21,7 +21,7 @@ Follow the steps below to get up and running for the configuration backup elemen

 3. Next, make sure to create new or update existing Plugins **Settings** with the backup details.

-    1. Navigate to `Plugins -> Settings` under the Golden Configuration Section.
+    1. Navigate to `Golden Config -> Settings` under the Golden Configuration Section.
     2. Create new or select one of the existing `Settings` objects
     3. Fill out the Backup Repository. (The dropdown will show the repository that was just created.)
     4. Fill out Backup Path Template. Typically `{{obj.site.slug}}/{{obj.name}}.cfg`, see [Setting Details](./navigating-golden.md#application-settings)
@@ -35,7 +35,7 @@ Follow the steps below to get up and running for the configuration backup elemen

 5. Execute the Backup.

-    1. Navigate to `Plugins -> Home` under the Golden Configuration Section.
+    1. Navigate to `Golden Config -> Home` under the Golden Configuration Section.
     2. Click on the `Execute` button and select `Backup`.
     3. Select what to run the backup on.
     4. Run the Job by clicking "Run Job" button.
@@ -68,7 +68,7 @@ Follow the steps below to get up and running for the intended configuration elem

 4. Next, make sure to create new or update existing Plugins **Settings** with the intended and jinja2 template details.

-    1. Navigate to `Plugins -> Settings` under the Golden Configuration Section.
+    1. Navigate to `Golden Config -> Settings` under the Golden Configuration Section.
     2. Create new or select one of the existing `Settings` objects
     3. Fill out the Intended Repository. (The dropdown will show the repository that was just created.)
     4. Fill out Intended Path Template. Typically `{{obj.site.slug}}/{{obj.name}}.cfg`, see [Setting Details](./navigating-golden.md#application-settings)
@@ -82,12 +82,12 @@ Follow the steps below to get up and running for the intended configuration elem
     3. Populate the GraphQL data.
     4. Make sure to follow the format specified in the **GraphQL** section in [Source of Truth Agg Details](./navigating-sot-agg.md)
     5. Click Create.
-    6. Navigate to `Plugins -> Settings` under the Golden Configuration Section.
+    6. Navigate to `Golden Config -> Settings` under the Golden Configuration Section.
     7. Select a SoTAgg Saved Query. (The dropdown will show the GraphQL query that was just created.)

 6. Execute the Intended.

-    1. Navigate to `Plugins -> Home`.
+    1. Navigate to `Golden Config -> Home`.
     2. Click on the `Execute` button and select `Intended`.
     3. Select what to run the intended generation on.
     4. Run the Job.
@@ -103,17 +103,17 @@ Compliance requires Backups and Intended Configurations in order to be executed.
 3. Follow the steps in [Intended Configuration](#intended-configuration).
 4. Create a Compliance Feature.

-    1. Navigate to `Plugins -> Compliance Feature`.
+    1. Navigate to `Golden Config -> Compliance Feature`.
     2. Click Add and give the feature a name. Typically this is based on the configuration snippet or section. E.g. "aaa".

 5. Create a Compliance Rule.

-    1. Navigate to `Plugins -> Compliance Rules`.
+    1. Navigate to `Golden Config -> Compliance Rules`.
     2. Click Add and populate the fields, make sure the rule is linked to the feature created previously. See [Configuration Compliance Settings](./navigating-compliance.md#configuration-compliance-settings) for details.

 6. Execute Compliance Check.

-    1. Navigate to `Plugins -> Configuration Compliance`.
+    1. Navigate to `Golden Config -> Configuration Compliance`.
     2. Click on the `Execute` button and select `Compliance`.
     3. Select what to run the compliance on.
     4. Run the Job.

diff --git a/nautobot_golden_config/forms.py b/nautobot_golden_config/forms.py
index 11088cb7..a4f640a2 100644
--- a/nautobot_golden_config/forms.py
+++ b/nautobot_golden_config/forms.py
@@ -33,7 +33,7 @@ class ConfigComplianceFilterForm(utilities_forms.BootstrapMixin, extras_forms.Cu
         "platform",
         "device_status",
         "device_type_id",
-        "device",
+        "device_id",
     ]

     q = forms.CharField(required=False, label="Search")
@@ -83,7 +83,7 @@ class ConfigComplianceFilterForm(utilities_forms.BootstrapMixin, extras_forms.Cu
     platform = utilities_forms.DynamicModelMultipleChoiceField(
         queryset=Platform.objects.all(), to_field_name="slug", required=False, null_option="None"
     )
-    device = utilities_forms.DynamicModelMultipleChoiceField(
+    device_id = utilities_forms.DynamicModelMultipleChoiceField(
         queryset=Device.objects.all(), required=False, null_option="None", label="Device"
     )

diff --git a/nautobot_golden_config/navigation.py b/nautobot_golden_config/navigation.py
index c867aa49..15796086 100644
--- a/nautobot_golden_config/navigation.py
+++ b/nautobot_golden_config/navigation.py
@@ -1,53 +1,59 @@
 """Add the configuration compliance buttons to the Plugins Navigation."""
-from nautobot.extras.plugins import PluginMenuItem, PluginMenuButton
+from nautobot.core.apps import NavMenuGroup, NavMenuItem, NavMenuTab, NavMenuButton
 from nautobot.utilities.choices import ButtonColorChoices

 from nautobot_golden_config.utilities.constant import ENABLE_COMPLIANCE, ENABLE_BACKUP

-
-plugin_items = [
-    PluginMenuItem(
+items = [
+    NavMenuItem(
         link="plugins:nautobot_golden_config:goldenconfig_list",
-        link_text="Home",
+        name="Home",
         permissions=["nautobot_golden_config.view_goldenconfig"],
     )
 ]

 if ENABLE_COMPLIANCE:
-    plugin_items.append(
-        PluginMenuItem(
+    items.append(
+        NavMenuItem(
             link="plugins:nautobot_golden_config:configcompliance_list",
-            link_text="Configuration Compliance",
+            name="Configuration Compliance",
+            permissions=["nautobot_golden_config.view_configcompliance"],
+        )
+    )
+    items.append(
+        NavMenuItem(
+            link="plugins:nautobot_golden_config:configcompliance_report",
+            name="Compliance Report",
             permissions=["nautobot_golden_config.view_configcompliance"],
         )
     )
-    plugin_items.append(
-        PluginMenuItem(
+    items.append(
+        NavMenuItem(
             link="plugins:nautobot_golden_config:compliancerule_list",
-            link_text="Compliance Rules",
+            name="Compliance Rules",
             permissions=["nautobot_golden_config.view_compliancerule"],
             buttons=(
-                PluginMenuButton(
+                NavMenuButton(
                     link="plugins:nautobot_golden_config:compliancerule_add",
                     title="Compliance Rules",
                     icon_class="mdi mdi-plus-thick",
-                    color=ButtonColorChoices.GREEN,
+                    button_class=ButtonColorChoices.GREEN,
                     permissions=["nautobot_golden_config.add_compliancerule"],
                 ),
             ),
         )
     )
-    plugin_items.append(
-        PluginMenuItem(
+    items.append(
+        NavMenuItem(
             link="plugins:nautobot_golden_config:compliancefeature_list",
-            link_text="Compliance Features",
+            name="Compliance Features",
             permissions=["nautobot_golden_config.view_compliancefeature"],
             buttons=(
-                PluginMenuButton(
+                NavMenuButton(
                     link="plugins:nautobot_golden_config:compliancefeature_add",
                     title="Compliance Features",
                     icon_class="mdi mdi-plus-thick",
-                    color=ButtonColorChoices.GREEN,
+                    button_class=ButtonColorChoices.GREEN,
                     permissions=["nautobot_golden_config.add_compliancefeature"],
                 ),
             ),
@@ -55,54 +61,62 @@
         )
     )

 if ENABLE_BACKUP:
-    plugin_items.append(
-        PluginMenuItem(
+    items.append(
+        NavMenuItem(
             link="plugins:nautobot_golden_config:configremove_list",
-            link_text="Config Removals",
+            name="Config Removals",
permissions=["nautobot_golden_config.view_configremove"], buttons=( - PluginMenuButton( + NavMenuButton( link="plugins:nautobot_golden_config:configremove_add", title="Config Remove", icon_class="mdi mdi-plus-thick", - color=ButtonColorChoices.GREEN, + button_class=ButtonColorChoices.GREEN, permissions=["nautobot_golden_config.add_configremove"], ), ), ) ) - plugin_items.append( - PluginMenuItem( + items.append( + NavMenuItem( link="plugins:nautobot_golden_config:configreplace_list", - link_text="Config Replacements", + name="Config Replacements", permissions=["nautobot_golden_config.view_configreplace"], buttons=( - PluginMenuButton( + NavMenuButton( link="plugins:nautobot_golden_config:configreplace_add", title="Config Replace", icon_class="mdi mdi-plus-thick", - color=ButtonColorChoices.GREEN, + button_class=ButtonColorChoices.GREEN, permissions=["nautobot_golden_config.add_configreplace"], ), ), ) ) -plugin_items.append( - PluginMenuItem( + +items.append( + NavMenuItem( link="plugins:nautobot_golden_config:goldenconfigsetting_list", - link_text="Settings", + name="Settings", permissions=["nautobot_golden_config.view_goldenconfigsetting"], buttons=( - PluginMenuButton( + NavMenuButton( link="plugins:nautobot_golden_config:goldenconfigsetting_add", title="Add", icon_class="mdi mdi-plus-thick", - color=ButtonColorChoices.GREEN, + button_class=ButtonColorChoices.GREEN, permissions=["nautobot_golden_config.change_goldenconfigsetting"], ), ), ), ) -menu_items = tuple(plugin_items) + +menu_items = ( + NavMenuTab( + name="Golden Config", + weight=1000, + groups=(NavMenuGroup(name="Golden Config", weight=100, items=tuple(items)),), + ), +) diff --git a/nautobot_golden_config/templates/nautobot_golden_config/compliance_report.html b/nautobot_golden_config/templates/nautobot_golden_config/compliance_report.html index 6436dedf..049c83bb 100644 --- a/nautobot_golden_config/templates/nautobot_golden_config/compliance_report.html +++ b/nautobot_golden_config/templates/nautobot_golden_config/compliance_report.html @@ -6,16 +6,6 @@ {% block content %}
 {% block buttons %}
-
-
-
-