From 1b08baf036c9e7a897c5a09c53815f6a3201a990 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 30 Apr 2024 11:53:51 -0500 Subject: [PATCH 001/229] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Refactor=20IPAddr?= =?UTF-8?q?esses=20loading=20from=20Infoblox=20to=20ignore=20duplicate=20I?= =?UTF-8?q?Ps=20and=20log=20when=20found.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../infoblox/diffsync/adapters/infoblox.py | 24 +++++++++++-------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py index 53d02d31a..a63bc9795 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py @@ -109,17 +109,21 @@ def load_ipaddresses(self): if _ip["names"]: dns_name = get_dns_name(possible_fqdn=_ip["names"][0]) ip_ext_attrs = get_ext_attr_dict(extattrs=_ip.get("extattrs", {})) - new_ip = self.ipaddress( - address=_ip["ip_address"], - prefix=_ip["network"], - prefix_length=prefix_length, - dns_name=dns_name, - status=self.conn.get_ipaddr_status(_ip), - ip_addr_type=self.conn.get_ipaddr_type(_ip), - description=_ip["comment"], - ext_attrs={**default_ext_attrs, **ip_ext_attrs}, + _, loaded = self.get_or_instantiate( + self.ipaddress, + ids={"address": _ip["ip_address"], "prefix": _ip["network"], "prefix_length": prefix_length}, + attrs={ + "dns_name": dns_name, + "status": self.conn.get_ipaddr_status(_ip), + "ip_addr_type": self.conn.get_ipaddr_type(_ip), + "description": _ip["comment"], + "ext_attrs": {**default_ext_attrs, **ip_ext_attrs}, + }, ) - self.add(new_ip) + if not loaded: + self.job.logger.warning( + f"Duplicate IP Address {_ip['address']}/{prefix_length} in {_ip['network']} attempting to be loaded." + ) def load_vlanviews(self): """Load InfobloxVLANView DiffSync model.""" From 4dd69e7f4fca1516b1289024049c6acb5d4dbec4 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 30 Apr 2024 13:29:13 -0500 Subject: [PATCH 002/229] =?UTF-8?q?docs:=20=F0=9F=93=9D=20Add=20changelog?= =?UTF-8?q?=20snippet?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- changes/443.fixed | 1 + 1 file changed, 1 insertion(+) create mode 100644 changes/443.fixed diff --git a/changes/443.fixed b/changes/443.fixed new file mode 100644 index 000000000..f904df814 --- /dev/null +++ b/changes/443.fixed @@ -0,0 +1 @@ +Fixed issue with loading duplicate IPAddresses from Infoblox. 
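The refactor in PATCH 001/229 leans on DiffSync's get_or_instantiate() helper, which returns a (model, created) tuple: when the boolean (named `loaded` in the patch) comes back False, a record with the same identifier keys was already loaded, so the adapter can log the duplicate instead of letting add() raise ObjectAlreadyExists. A minimal, self-contained sketch of that pattern follows; the toy model and sample records are illustrative stand-ins, not the integration's real classes:

    from diffsync import DiffSync, DiffSyncModel


    class SketchIPAddress(DiffSyncModel):
        """Toy IP address model keyed the same way as the patch: address/prefix/prefix_length."""

        _modelname = "ipaddress"
        _identifiers = ("address", "prefix", "prefix_length")
        _attributes = ("dns_name",)

        address: str
        prefix: str
        prefix_length: int
        dns_name: str = ""


    class SketchAdapter(DiffSync):
        """Toy adapter that logs duplicate keys instead of raising on them."""

        ipaddress = SketchIPAddress
        top_level = ["ipaddress"]

        def load(self, records):
            for rec in records:
                # First sight of an identifier tuple instantiates and adds the object;
                # any repeat returns the existing object with created=False.
                _, created = self.get_or_instantiate(
                    self.ipaddress,
                    ids={"address": rec["ip"], "prefix": rec["net"], "prefix_length": rec["len"]},
                    attrs={"dns_name": rec.get("name", "")},
                )
                if not created:
                    print(f"Duplicate IP Address {rec['ip']}/{rec['len']} in {rec['net']} skipped.")


    adapter = SketchAdapter()
    adapter.load(
        [
            {"ip": "10.0.0.1", "net": "10.0.0.0/24", "len": 24, "name": "host1"},
            {"ip": "10.0.0.1", "net": "10.0.0.0/24", "len": 24, "name": "host1-dup"},  # logs the duplicate
        ]
    )

The same (object, created) convention is why the patch can drop the explicit self.add() call: get_or_instantiate() adds the object to the adapter on first sight.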
From 5b31f674010c071ba00128177e4823b363c8374e Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 30 Apr 2024 21:30:42 -0500 Subject: [PATCH 003/229] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Correct=20key=20i?= =?UTF-8?q?n=20log=20to=20be=20ip=5Faddress.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integrations/infoblox/diffsync/adapters/infoblox.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py index a63bc9795..86543a6e0 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py @@ -122,7 +122,7 @@ def load_ipaddresses(self): ) if not loaded: self.job.logger.warning( - f"Duplicate IP Address {_ip['address']}/{prefix_length} in {_ip['network']} attempting to be loaded." + f"Duplicate IP Address {_ip['ip_address']}/{prefix_length} in {_ip['network']} attempting to be loaded." ) def load_vlanviews(self): From 114b6980dc5f2edf31b557fd3332bd03f1bc46bf Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 30 Apr 2024 21:31:14 -0500 Subject: [PATCH 004/229] =?UTF-8?q?style:=20=F0=9F=A7=91=E2=80=8D?= =?UTF-8?q?=F0=9F=92=BB=20Add=20debug=20logging=20to=20load=20methods=20to?= =?UTF-8?q?=20provide=20more=20information=20on=20what's=20happening.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integrations/infoblox/diffsync/adapters/infoblox.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py index 86543a6e0..2dd1622d5 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py @@ -50,6 +50,8 @@ def __init__(self, *args, job=None, sync=None, conn, **kwargs): def load_prefixes(self): """Load InfobloxNetwork DiffSync model.""" if PLUGIN_CFG.get("infoblox_import_subnets"): + if self.job.debug: + self.job.logger.debug(f"Loading Subnets from Infoblox.") subnets = [] containers = [] for prefix in PLUGIN_CFG["infoblox_import_subnets"]: @@ -101,6 +103,8 @@ def load_prefixes(self): def load_ipaddresses(self): """Load InfobloxIPAddress DiffSync model.""" + if self.job.debug: + self.job.logger.debug(f"Loading IP addresses from Infoblox.") ipaddrs = self.conn.get_all_ipv4address_networks(prefixes=self.subnets) default_ext_attrs = get_default_ext_attrs(review_list=ipaddrs) for _ip in ipaddrs: @@ -127,6 +131,8 @@ def load_ipaddresses(self): def load_vlanviews(self): """Load InfobloxVLANView DiffSync model.""" + if self.job.debug: + self.job.logger.debug(f"Loading VLAN Views from Infoblox.") vlanviews = self.conn.get_vlanviews() default_ext_attrs = get_default_ext_attrs(review_list=vlanviews) for _vv in vlanviews: @@ -140,6 +146,8 @@ def load_vlanviews(self): def load_vlans(self): """Load InfobloxVlan DiffSync model.""" + if self.job.debug: + self.job.logger.debug(f"Loading VLANs from Infoblox.") vlans = self.conn.get_vlans() default_ext_attrs = get_default_ext_attrs(review_list=vlans) for _vlan in vlans: From 8c4ec2bece416123561550f460900f59f5a15b61 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Thu, 2 May 2024 
10:01:36 -0500 Subject: [PATCH 005/229] =?UTF-8?q?docs:=20=F0=9F=93=9D=20Add=20some=20add?= =?UTF-8?q?itional=20debug=20logging=20so=20the=20user=20knows=20what's=20?= =?UTF-8?q?occurring.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integrations/infoblox/diffsync/adapters/infoblox.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py index 2dd1622d5..c47d234d6 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py @@ -51,7 +51,7 @@ def load_prefixes(self): """Load InfobloxNetwork DiffSync model.""" if PLUGIN_CFG.get("infoblox_import_subnets"): if self.job.debug: - self.job.logger.debug(f"Loading Subnets from Infoblox.") + self.job.logger.debug("Loading Subnets from Infoblox.") subnets = [] containers = [] for prefix in PLUGIN_CFG["infoblox_import_subnets"]: @@ -104,7 +104,7 @@ def load_prefixes(self): def load_ipaddresses(self): """Load InfobloxIPAddress DiffSync model.""" if self.job.debug: - self.job.logger.debug(f"Loading IP addresses from Infoblox.") + self.job.logger.debug("Loading IP addresses from Infoblox.") ipaddrs = self.conn.get_all_ipv4address_networks(prefixes=self.subnets) default_ext_attrs = get_default_ext_attrs(review_list=ipaddrs) for _ip in ipaddrs: @@ -132,7 +132,7 @@ def load_ipaddresses(self): def load_vlanviews(self): """Load InfobloxVLANView DiffSync model.""" if self.job.debug: - self.job.logger.debug(f"Loading VLAN Views from Infoblox.") + self.job.logger.debug("Loading VLAN Views from Infoblox.") vlanviews = self.conn.get_vlanviews() default_ext_attrs = get_default_ext_attrs(review_list=vlanviews) for _vv in vlanviews: @@ -147,7 +147,7 @@ def load_vlanviews(self): def load_vlans(self): """Load InfobloxVlan DiffSync model.""" if self.job.debug: - self.job.logger.debug(f"Loading VLANs from Infoblox.") + self.job.logger.debug("Loading VLANs from Infoblox.") vlans = self.conn.get_vlans() default_ext_attrs = get_default_ext_attrs(review_list=vlans) for _vlan in vlans: From 280b50a5aedffe0e226004732b08bd92179b950a Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Thu, 2 May 2024 16:02:45 -0500 Subject: [PATCH 006/229] =?UTF-8?q?build:=20=F0=9F=8F=97=EF=B8=8F=20Remove?= =?UTF-8?q?=20deprecated=20command=20for=20MySQL?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- development/docker-compose.mysql.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/development/docker-compose.mysql.yml b/development/docker-compose.mysql.yml index 062ada948..2f1103da4 100644 --- a/development/docker-compose.mysql.yml +++ b/development/docker-compose.mysql.yml @@ -19,7 +19,6 @@ services: db: image: "mysql:8" command: - - "--default-authentication-plugin=mysql_native_password" - "--max_connections=1000" env_file: - "development.env" From bfdc1a00abe0f5735085861cc2aa176e01094e91 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Thu, 2 May 2024 16:09:17 -0500 Subject: [PATCH 007/229] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Correct=20setting?= =?UTF-8?q?=20to=20be=20infoblox=5Fnetwork=5Fview.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/integrations/infoblox/utils/client.py | 4 ++-- 1 file changed, 2 
insertions(+), 2 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/utils/client.py b/nautobot_ssot/integrations/infoblox/utils/client.py index bae55d40a..450a5fce2 100644 --- a/nautobot_ssot/integrations/infoblox/utils/client.py +++ b/nautobot_ssot/integrations/infoblox/utils/client.py @@ -1391,8 +1391,8 @@ def get_network_containers(self, prefix: str = "", ipv6: bool = False): "_return_fields": "network,comment,network_view,extattrs,rir_organization,rir", "_max_results": 100000, } - if PLUGIN_CFG.get("NAUTOBOT_INFOBLOX_NETWORK_VIEW"): - params.update({"network_view": PLUGIN_CFG["NAUTOBOT_INFOBLOX_NETWORK_VIEW"]}) + if PLUGIN_CFG.get("infoblox_network_view"): + params.update({"network_view": PLUGIN_CFG["infoblox_network_view"]}) if prefix: params.update({"network": prefix}) response = self._request("GET", url_path, params=params) From b0bf6e05967d9c536789a88e283726a49dffe241 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Thu, 2 May 2024 17:01:21 -0500 Subject: [PATCH 008/229] =?UTF-8?q?revert:=20=E2=8F=AA=EF=B8=8F=20Revert?= =?UTF-8?q?=20key=20change.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/integrations/infoblox/utils/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/utils/client.py b/nautobot_ssot/integrations/infoblox/utils/client.py index 450a5fce2..bae55d40a 100644 --- a/nautobot_ssot/integrations/infoblox/utils/client.py +++ b/nautobot_ssot/integrations/infoblox/utils/client.py @@ -1391,8 +1391,8 @@ def get_network_containers(self, prefix: str = "", ipv6: bool = False): "_return_fields": "network,comment,network_view,extattrs,rir_organization,rir", "_max_results": 100000, } - if PLUGIN_CFG.get("infoblox_network_view"): - params.update({"network_view": PLUGIN_CFG["infoblox_network_view"]}) + if PLUGIN_CFG.get("NAUTOBOT_INFOBLOX_NETWORK_VIEW"): + params.update({"network_view": PLUGIN_CFG["NAUTOBOT_INFOBLOX_NETWORK_VIEW"]}) if prefix: params.update({"network": prefix}) response = self._request("GET", url_path, params=params) From 357d2ced85144ecc4b461d3b8cace470e983c138 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Mon, 6 May 2024 07:56:48 -0500 Subject: [PATCH 009/229] =?UTF-8?q?docs:=20=F0=9F=93=9D=20Add=20missing=20?= =?UTF-8?q?attribution=20for=20Device42=20integration.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/README.md b/README.md index bc62e308d..b287c9fae 100644 --- a/README.md +++ b/README.md @@ -114,6 +114,9 @@ This project includes code originally written in separate Nautobot apps, which h [@nniehoff](https://github.com/nniehoff), [@qduk](https://github.com/qduk), [@ubajze](https://github.com/ubajze) +- [nautobot-plugin-ssot-device42](https://github.com/nautobot/nautobot-plugin-ssot-device42): + Thanks + [@jdrew82](https://github.com/jdrew82) - [nautobot-plugin-ssot-infoblox](https://github.com/nautobot/nautobot-plugin-ssot-infoblox): Thanks [@FragmentedPacket](https://github.com/FragmentedPacket), From 5d4f37e621355ba6b0962afe8fbc7b92fc30269a Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Mon, 6 May 2024 08:01:56 -0500 Subject: [PATCH 010/229] =?UTF-8?q?docs:=20=F0=9F=93=9D=20Add=20changelog?= =?UTF-8?q?=20snippet?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit --- changes/450.documentation | 1 + 1 file changed, 1 insertion(+) create mode 100644 changes/450.documentation diff --git a/changes/450.documentation b/changes/450.documentation new file mode 100644 index 000000000..bb1fd3a38 --- /dev/null +++ b/changes/450.documentation @@ -0,0 +1 @@ +Add missing attribution for Device42 integration to README. From 94d87b8a2ed61203ede4b2f77a22bb3fc14fa8bc Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Mon, 6 May 2024 08:13:17 -0500 Subject: [PATCH 011/229] =?UTF-8?q?build:=20=F0=9F=8F=97=EF=B8=8F=20Add=20?= =?UTF-8?q?example=20environment=20vars=20for=20config=20settings.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- development/creds.example.env | 2 ++ development/development.env | 4 ++++ 2 files changed, 6 insertions(+) diff --git a/development/creds.example.env b/development/creds.example.env index 780d04b29..5feb6c71c 100644 --- a/development/creds.example.env +++ b/development/creds.example.env @@ -29,6 +29,8 @@ MYSQL_PASSWORD=${NAUTOBOT_DB_PASSWORD} NAUTOBOT_ARISTACV_CVP_PASSWORD="changeme" NAUTOBOT_ARISTACV_CVP_TOKEN="changeme" +NAUTOBOT_SSOT_DEVICE42_PASSWORD="changeme" + NAUTOBOT_SSOT_INFOBLOX_PASSWORD="changeme" # ACI Credentials. Append friendly name to the end to identify each APIC. diff --git a/development/development.env b/development/development.env index 3eb37964b..d54cc6f1b 100644 --- a/development/development.env +++ b/development/development.env @@ -74,6 +74,10 @@ NAUTOBOT_SSOT_DEVICE42_HOST="" NAUTOBOT_SSOT_DEVICE42_USERNAME="" NAUTOBOT_SSOT_DEVICE42_PASSWORD="" +NAUTOBOT_SSOT_ENABLE_DNA_CENTER="True" +NAUTOBOT_SSOT_DEVICE42_HOST="https://device42.example.com" +NAUTOBOT_SSOT_DEVICE42_USERNAM="changeme" + NAUTOBOT_SSOT_ENABLE_INFOBLOX="True" NAUTOBOT_SSOT_INFOBLOX_DEFAULT_STATUS="Active" NAUTOBOT_SSOT_INFOBLOX_ENABLE_SYNC_TO_INFOBLOX="True" From 046809e41e2ad60042988d3eb1d1c73413f11f3d Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Mon, 6 May 2024 08:21:42 -0500 Subject: [PATCH 012/229] =?UTF-8?q?build:=20=F0=9F=8F=97=EF=B8=8F=20Set=20?= =?UTF-8?q?default=20for=20enable=5Fdevice42=20to=20True=20in=20settings?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- development/nautobot_config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/development/nautobot_config.py b/development/nautobot_config.py index 12c20c603..6ad68fdb8 100644 --- a/development/nautobot_config.py +++ b/development/nautobot_config.py @@ -193,7 +193,7 @@ "aristacv_verify": is_truthy(os.getenv("NAUTOBOT_ARISTACV_VERIFY", True)), "enable_aci": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_ACI")), "enable_aristacv": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_ARISTACV")), - "enable_device42": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_DEVICE42")), + "enable_device42": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_DEVICE42", "True")), "enable_infoblox": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_INFOBLOX")), "enable_ipfabric": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_IPFABRIC")), "enable_servicenow": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_SERVICENOW")), From cf9f66e4f6033cc3cd6cceccbaa009647682930a Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Mon, 6 May 2024 08:38:44 -0500 Subject: [PATCH 013/229] =?UTF-8?q?ci:=20=F0=9F=91=B7=20Remove=20MySQL=20p?= 
=?UTF-8?q?assword=20command=20as=20no=20longer=20needed=20and=20breaking?= =?UTF-8?q?=20CI.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- development/docker-compose.mysql.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/development/docker-compose.mysql.yml b/development/docker-compose.mysql.yml index 062ada948..2f1103da4 100644 --- a/development/docker-compose.mysql.yml +++ b/development/docker-compose.mysql.yml @@ -19,7 +19,6 @@ services: db: image: "mysql:8" command: - - "--default-authentication-plugin=mysql_native_password" - "--max_connections=1000" env_file: - "development.env" From 7b04fa1a306a3bfb5bd31f4039d3ed959e20a89f Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 7 May 2024 12:32:08 -0500 Subject: [PATCH 014/229] =?UTF-8?q?build:=20=F0=9F=8F=97=EF=B8=8F=20Update?= =?UTF-8?q?=20defaults=20for=20all=20integrations=20to=20be=20disabled.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- development/development.env | 16 ++++++---------- development/nautobot_config.py | 14 +++++++------- 2 files changed, 13 insertions(+), 17 deletions(-) diff --git a/development/development.env b/development/development.env index d54cc6f1b..8f23d010e 100644 --- a/development/development.env +++ b/development/development.env @@ -45,7 +45,7 @@ NAUTOBOT_CELERY_TASK_TIME_LIMIT=7200 NAUTOBOT_SSOT_HIDE_EXAMPLE_JOBS="False" NAUTOBOT_SSOT_ALLOW_CONFLICTING_APPS="False" -NAUTOBOT_SSOT_ENABLE_ACI="True" +NAUTOBOT_SSOT_ENABLE_ACI="False" NAUTOBOT_SSOT_ACI_TAG="ACI" NAUTOBOT_SSOT_ACI_TAG_COLOR="0047AB" NAUTOBOT_SSOT_ACI_TAG_UP="UP" @@ -57,7 +57,7 @@ NAUTOBOT_SSOT_ACI_IGNORE_TENANTS="[mgmt,infra]" NAUTOBOT_SSOT_ACI_COMMENTS="Created by ACI SSoT Integration" NAUTOBOT_SSOT_ACI_SITE="Data Center" -NAUTOBOT_SSOT_ENABLE_ARISTACV="True" +NAUTOBOT_SSOT_ENABLE_ARISTACV="False" NAUTOBOT_ARISTACV_CONTROLLER_SITE="" NAUTOBOT_ARISTACV_CREATE_CONTROLLER="True" NAUTOBOT_ARISTACV_CVAAS_URL="www.arista.io:443" @@ -69,16 +69,12 @@ NAUTOBOT_ARISTACV_IMPORT_ACTIVE="False" NAUTOBOT_ARISTACV_IMPORT_TAG="False" NAUTOBOT_ARISTACV_VERIFY=True -NAUTOBOT_SSOT_ENABLE_DEVICE42="True" +NAUTOBOT_SSOT_ENABLE_DEVICE42="False" NAUTOBOT_SSOT_DEVICE42_HOST="" NAUTOBOT_SSOT_DEVICE42_USERNAME="" NAUTOBOT_SSOT_DEVICE42_PASSWORD="" -NAUTOBOT_SSOT_ENABLE_DNA_CENTER="True" -NAUTOBOT_SSOT_DEVICE42_HOST="https://device42.example.com" -NAUTOBOT_SSOT_DEVICE42_USERNAM="changeme" - -NAUTOBOT_SSOT_ENABLE_INFOBLOX="True" +NAUTOBOT_SSOT_ENABLE_INFOBLOX="False" NAUTOBOT_SSOT_INFOBLOX_DEFAULT_STATUS="Active" NAUTOBOT_SSOT_INFOBLOX_ENABLE_SYNC_TO_INFOBLOX="True" NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_IP_ADDRESSES="True" @@ -92,11 +88,11 @@ NAUTOBOT_SSOT_INFOBLOX_USERNAME="changeme" NAUTOBOT_SSOT_INFOBLOX_VERIFY_SSL="True" # NAUTOBOT_SSOT_INFOBLOX_WAPI_VERSION="" -NAUTOBOT_SSOT_ENABLE_SERVICENOW="True" +NAUTOBOT_SSOT_ENABLE_SERVICENOW="False" SERVICENOW_INSTANCE="" SERVICENOW_USERNAME="" -NAUTOBOT_SSOT_ENABLE_IPFABRIC="True" +NAUTOBOT_SSOT_ENABLE_IPFABRIC="False" IPFABRIC_HOST="https://ipfabric.example.com" IPFABRIC_SSL_VERIFY="True" IPFABRIC_TIMEOUT=15 diff --git a/development/nautobot_config.py b/development/nautobot_config.py index 6ad68fdb8..b33319dc8 100644 --- a/development/nautobot_config.py +++ b/development/nautobot_config.py @@ -191,13 +191,13 @@ "atl01": "Atlanta", }, "aristacv_verify": is_truthy(os.getenv("NAUTOBOT_ARISTACV_VERIFY", True)), - "enable_aci": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_ACI")), - 
"enable_aristacv": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_ARISTACV")), - "enable_device42": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_DEVICE42", "True")), - "enable_infoblox": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_INFOBLOX")), - "enable_ipfabric": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_IPFABRIC")), - "enable_servicenow": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_SERVICENOW")), - "hide_example_jobs": is_truthy(os.getenv("NAUTOBOT_SSOT_HIDE_EXAMPLE_JOBS")), + "enable_aci": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_ACI", "false")), + "enable_aristacv": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_ARISTACV", "false")), + "enable_device42": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_DEVICE42", "false")), + "enable_infoblox": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_INFOBLOX", "false")), + "enable_ipfabric": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_IPFABRIC", "false")), + "enable_servicenow": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_SERVICENOW", "false")), + "hide_example_jobs": is_truthy(os.getenv("NAUTOBOT_SSOT_HIDE_EXAMPLE_JOBS", "true")), "device42_host": os.getenv("NAUTOBOT_SSOT_DEVICE42_HOST", ""), "device42_username": os.getenv("NAUTOBOT_SSOT_DEVICE42_USERNAME", ""), "device42_password": os.getenv("NAUTOBOT_SSOT_DEVICE42_PASSWORD", ""), From cbbdbe225f15ea14237cbde1694b62464c744f86 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 7 May 2024 12:32:37 -0500 Subject: [PATCH 015/229] =?UTF-8?q?test:=20=E2=9C=85=20Fix=20test=20for=20?= =?UTF-8?q?CVP=20that=20required=20Manufacturer=20to=20already=20be=20crea?= =?UTF-8?q?ted.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/tests/aristacv/test_utils_nautobot.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/nautobot_ssot/tests/aristacv/test_utils_nautobot.py b/nautobot_ssot/tests/aristacv/test_utils_nautobot.py index 5a5640fc4..026cba75e 100644 --- a/nautobot_ssot/tests/aristacv/test_utils_nautobot.py +++ b/nautobot_ssot/tests/aristacv/test_utils_nautobot.py @@ -16,6 +16,10 @@ class TestNautobotUtils(TestCase): databases = ("default", "job_logs") + def setUp(self): + """Configure shared test vars.""" + self.arista_manu = Manufacturer.objects.get_or_create(name="Arista")[0] + def test_verify_site_success(self): """Test the verify_site method for existing Site.""" loc_type = LocationType.objects.get_or_create(name="Site")[0] @@ -33,9 +37,7 @@ def test_verify_site_fail(self): def test_verify_device_type_object_success(self): """Test the verify_device_type_object for existing DeviceType.""" - new_dt, _ = DeviceType.objects.get_or_create( - model="DCS-7150S-24", manufacturer=Manufacturer.objects.get(name="Arista") - ) + new_dt, _ = DeviceType.objects.get_or_create(model="DCS-7150S-24", manufacturer=self.arista_manu) result = nautobot.verify_device_type_object(device_type="DCS-7150S-24") self.assertEqual(result, new_dt) From d8c1576634d832c0f93d0eac8ce7379e6d51bde2 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 7 May 2024 15:12:35 -0500 Subject: [PATCH 016/229] =?UTF-8?q?test:=20=E2=9C=85=20Fix=20tests=20to=20?= =?UTF-8?q?address=20signals=20not=20being=20triggered=20as=20integrations?= =?UTF-8?q?=20are=20disabled=20from=20start.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../tests/infoblox/test_nautobot_adapter.py | 14 +++- .../tests/infoblox/test_tags_and_cfs.py | 65 ++++++++++++++++--- 2 files changed, 
69 insertions(+), 10 deletions(-) diff --git a/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py b/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py index 2a8349d09..e8b0fbd4f 100644 --- a/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py +++ b/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py @@ -3,6 +3,7 @@ from django.contrib.contenttypes.models import ContentType from django.test import TestCase +from nautobot.extras.choices import RelationshipTypeChoices from nautobot.extras.models import Relationship, RelationshipAssociation, Status from nautobot.ipam.models import Prefix, VLAN, VLANGroup @@ -14,7 +15,18 @@ class TestNautobotAdapter(TestCase): def setUp(self): active_status = Status.objects.get(name="Active") - prefix_vlan_relationship = Relationship.objects.get(label="Prefix -> VLAN") + relationship_dict = { + "label": "Prefix -> VLAN", + "key": "prefix_to_vlan", + "type": RelationshipTypeChoices.TYPE_ONE_TO_MANY, + "source_type": ContentType.objects.get_for_model(Prefix), + "source_label": "Prefix", + "destination_type": ContentType.objects.get_for_model(VLAN), + "destination_label": "VLAN", + } + prefix_vlan_relationship = Relationship.objects.get_or_create( + label=relationship_dict["label"], defaults=relationship_dict + )[0] vlan_group1 = VLANGroup.objects.create(name="one") vlan_group2 = VLANGroup.objects.create(name="two") vlan10 = VLAN.objects.create( diff --git a/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py b/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py index e4dd93fc4..4e4ca8407 100644 --- a/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py +++ b/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py @@ -5,7 +5,8 @@ from django.contrib.contenttypes.models import ContentType from django.test import TestCase -from nautobot.extras.models import CustomField, Status, Tag +from nautobot.extras.choices import CustomFieldTypeChoices, RelationshipTypeChoices +from nautobot.extras.models import CustomField, Relationship, Status, Tag from nautobot.ipam.models import VLAN, IPAddress, Prefix, VLANGroup from nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox import InfobloxAdapter @@ -17,8 +18,26 @@ class TestTagging(TestCase): def setUp(self): "Test class set up." 
- self.tag_sync_from_infoblox = Tag.objects.get(name="SSoT Synced from Infoblox") - self.tag_sync_to_infoblox = Tag.objects.get(name="SSoT Synced to Infoblox") + self.tag_sync_from_infoblox, _ = Tag.objects.get_or_create( + name="SSoT Synced from Infoblox", + defaults={ + "name": "SSoT Synced from Infoblox", + "description": "Object synced at some point from Infoblox", + "color": "40bfae", + }, + ) + for model in [IPAddress, Prefix, VLAN]: + self.tag_sync_from_infoblox.content_types.add(ContentType.objects.get_for_model(model)) + self.tag_sync_to_infoblox, _ = Tag.objects.get_or_create( + name="SSoT Synced to Infoblox", + defaults={ + "name": "SSoT Synced to Infoblox", + "description": "Object synced at some point to Infoblox", + "color": "40bfae", + }, + ) + for model in [IPAddress, Prefix, VLAN]: + self.tag_sync_to_infoblox.content_types.add(ContentType.objects.get_for_model(model)) def test_tags_have_correct_content_types_set(self): """Ensure tags have correct content types configured.""" @@ -39,7 +58,8 @@ def test_objects_synced_from_infoblox_are_tagged(self): network="10.0.0.0/8", description="Test Network", network_type="network", - status="Active", + ext_attrs={}, + vlans={}, ) infoblox_adapter.add(ds_prefix) ds_ipaddress = infoblox_adapter.ipaddress( @@ -50,12 +70,10 @@ def test_objects_synced_from_infoblox_are_tagged(self): prefix="10.0.0.0/8", prefix_length=8, ip_addr_type="host", + ext_attrs={}, ) infoblox_adapter.add(ds_ipaddress) - ds_vlangroup = infoblox_adapter.vlangroup( - name="TestVLANGroup", - description="", - ) + ds_vlangroup = infoblox_adapter.vlangroup(name="TestVLANGroup", description="", ext_attrs={}) infoblox_adapter.add(ds_vlangroup) ds_vlan = infoblox_adapter.vlan( vid=750, @@ -80,6 +98,17 @@ def test_objects_synced_from_infoblox_are_tagged(self): def test_objects_synced_to_infoblox_are_tagged(self): """Ensure objects synced to Infoblox have 'SSoT Synced to Infoblox' tag applied.""" + relationship_dict = { + "label": "Prefix -> VLAN", + "key": "prefix_to_vlan", + "type": RelationshipTypeChoices.TYPE_ONE_TO_MANY, + "source_type": ContentType.objects.get_for_model(Prefix), + "source_label": "Prefix", + "destination_type": ContentType.objects.get_for_model(VLAN), + "destination_label": "VLAN", + } + Relationship.objects.get_or_create(label=relationship_dict["label"], defaults=relationship_dict) + nb_prefix = Prefix( network="10.0.0.0", prefix_length=8, @@ -132,7 +161,25 @@ class TestCustomFields(TestCase): def setUp(self): """Test class set up.""" self.today = datetime.date.today().isoformat() - self.cf_synced_to_infoblox = CustomField.objects.get(key="ssot_synced_to_infoblox") + self.cf_synced_to_infoblox, _ = CustomField.objects.get_or_create( + type=CustomFieldTypeChoices.TYPE_DATE, + key="ssot_synced_to_infoblox", + defaults={ + "label": "Last synced to Infoblox on", + }, + ) + for model in [IPAddress, Prefix, VLAN, VLANGroup]: + self.cf_synced_to_infoblox.content_types.add(ContentType.objects.get_for_model(model)) + relationship_dict = { + "label": "Prefix -> VLAN", + "key": "prefix_to_vlan", + "type": RelationshipTypeChoices.TYPE_ONE_TO_MANY, + "source_type": ContentType.objects.get_for_model(Prefix), + "source_label": "Prefix", + "destination_type": ContentType.objects.get_for_model(VLAN), + "destination_label": "VLAN", + } + Relationship.objects.get_or_create(label=relationship_dict["label"], defaults=relationship_dict) def test_cfs_have_correct_content_types_set(self): """Ensure cfs have correct content types configured.""" From 
e7323a9b79875139cb040130e32cff3766c72506 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 7 May 2024 15:27:06 -0500 Subject: [PATCH 017/229] =?UTF-8?q?test:=20=E2=9C=85=20Fix=20contrib=20tes?= =?UTF-8?q?t?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/tests/contrib_base_classes.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/nautobot_ssot/tests/contrib_base_classes.py b/nautobot_ssot/tests/contrib_base_classes.py index 28330f748..c694e85a4 100644 --- a/nautobot_ssot/tests/contrib_base_classes.py +++ b/nautobot_ssot/tests/contrib_base_classes.py @@ -33,9 +33,12 @@ def setUpTestData(cls): cls.device_role.content_types.set([ContentType.objects.get_for_model(dcim_models.Device)]) cls.manufacturer = dcim_models.Manufacturer.objects.create(name="Generic Inc.") cls.device_type = dcim_models.DeviceType.objects.create(model="Generic Switch", manufacturer=cls.manufacturer) + cls.location_type, created = dcim_models.LocationType.objects.get_or_create(name="Site") + if created: + cls.location_type.content_types.add(ContentType.objects.get_for_model(dcim_models.Device)) cls.location = dcim_models.Location.objects.create( name="Bremen", - location_type=dcim_models.LocationType.objects.get_or_create(name="Site")[0], + location_type=cls.location_type, status=cls.status_active, ) for name in ["sw01", "sw02"]: From 63097b7632bd41345e4cbe7d6e50a76ea13def37 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 7 May 2024 16:15:16 -0500 Subject: [PATCH 018/229] =?UTF-8?q?refactor:=20=E2=99=BB=EF=B8=8F=20Redo?= =?UTF-8?q?=20Relationship=20creation=20to=20be=20done=20in=20fixture=20me?= =?UTF-8?q?thod=20and=20update=20tests=20to=20call=20fixture=20method.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../tests/infoblox/fixtures_infoblox.py | 18 ++++++++++++ .../tests/infoblox/test_nautobot_adapter.py | 17 ++--------- .../tests/infoblox/test_tags_and_cfs.py | 28 ++++--------------- 3 files changed, 26 insertions(+), 37 deletions(-) diff --git a/nautobot_ssot/tests/infoblox/fixtures_infoblox.py b/nautobot_ssot/tests/infoblox/fixtures_infoblox.py index 23f2b50eb..2b05e7f91 100644 --- a/nautobot_ssot/tests/infoblox/fixtures_infoblox.py +++ b/nautobot_ssot/tests/infoblox/fixtures_infoblox.py @@ -5,6 +5,10 @@ import json import os +from django.contrib.contenttypes.models import ContentType +from nautobot.extras.choices import RelationshipTypeChoices +from nautobot.extras.models import Relationship +from nautobot.ipam.models import Prefix, VLAN from nautobot_ssot.integrations.infoblox.utils import client @@ -26,6 +30,20 @@ def localhost_client_infoblox(localhost_url): ) +def create_prefix_relationship(): + """Create Relationship for Prefix -> VLAN.""" + relationship_dict = { # pylint: disable=duplicate-code + "label": "Prefix -> VLAN", + "key": "prefix_to_vlan", + "type": RelationshipTypeChoices.TYPE_ONE_TO_MANY, + "source_type": ContentType.objects.get_for_model(Prefix), + "source_label": "Prefix", + "destination_type": ContentType.objects.get_for_model(VLAN), + "destination_label": "VLAN", + } + return Relationship.objects.get_or_create(label=relationship_dict["label"], defaults=relationship_dict)[0] + + def get_all_ipv4address_networks(): """Return all IPv4Address networks.""" return _json_read_fixture("get_all_ipv4address_networks.json") diff --git 
a/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py b/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py index e8b0fbd4f..023a5fe5d 100644 --- a/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py +++ b/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py @@ -3,11 +3,11 @@ from django.contrib.contenttypes.models import ContentType from django.test import TestCase -from nautobot.extras.choices import RelationshipTypeChoices -from nautobot.extras.models import Relationship, RelationshipAssociation, Status +from nautobot.extras.models import RelationshipAssociation, Status from nautobot.ipam.models import Prefix, VLAN, VLANGroup from nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot import NautobotAdapter +from nautobot_ssot.tests.infoblox.fixtures_infoblox import create_prefix_relationship class TestNautobotAdapter(TestCase): @@ -15,18 +15,7 @@ class TestNautobotAdapter(TestCase): def setUp(self): active_status = Status.objects.get(name="Active") - relationship_dict = { - "label": "Prefix -> VLAN", - "key": "prefix_to_vlan", - "type": RelationshipTypeChoices.TYPE_ONE_TO_MANY, - "source_type": ContentType.objects.get_for_model(Prefix), - "source_label": "Prefix", - "destination_type": ContentType.objects.get_for_model(VLAN), - "destination_label": "VLAN", - } - prefix_vlan_relationship = Relationship.objects.get_or_create( - label=relationship_dict["label"], defaults=relationship_dict - )[0] + prefix_vlan_relationship = create_prefix_relationship() vlan_group1 = VLANGroup.objects.create(name="one") vlan_group2 = VLANGroup.objects.create(name="two") vlan10 = VLAN.objects.create( diff --git a/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py b/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py index 4e4ca8407..3df84923d 100644 --- a/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py +++ b/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py @@ -5,12 +5,13 @@ from django.contrib.contenttypes.models import ContentType from django.test import TestCase -from nautobot.extras.choices import CustomFieldTypeChoices, RelationshipTypeChoices -from nautobot.extras.models import CustomField, Relationship, Status, Tag +from nautobot.extras.choices import CustomFieldTypeChoices +from nautobot.extras.models import CustomField, Status, Tag from nautobot.ipam.models import VLAN, IPAddress, Prefix, VLANGroup from nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox import InfobloxAdapter from nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot import NautobotAdapter +from nautobot_ssot.tests.infoblox.fixtures_infoblox import create_prefix_relationship class TestTagging(TestCase): @@ -98,17 +99,7 @@ def test_objects_synced_from_infoblox_are_tagged(self): def test_objects_synced_to_infoblox_are_tagged(self): """Ensure objects synced to Infoblox have 'SSoT Synced to Infoblox' tag applied.""" - relationship_dict = { - "label": "Prefix -> VLAN", - "key": "prefix_to_vlan", - "type": RelationshipTypeChoices.TYPE_ONE_TO_MANY, - "source_type": ContentType.objects.get_for_model(Prefix), - "source_label": "Prefix", - "destination_type": ContentType.objects.get_for_model(VLAN), - "destination_label": "VLAN", - } - Relationship.objects.get_or_create(label=relationship_dict["label"], defaults=relationship_dict) - + create_prefix_relationship() nb_prefix = Prefix( network="10.0.0.0", prefix_length=8, @@ -170,16 +161,7 @@ def setUp(self): ) for model in [IPAddress, Prefix, VLAN, VLANGroup]: self.cf_synced_to_infoblox.content_types.add(ContentType.objects.get_for_model(model)) - 
relationship_dict = { - "label": "Prefix -> VLAN", - "key": "prefix_to_vlan", - "type": RelationshipTypeChoices.TYPE_ONE_TO_MANY, - "source_type": ContentType.objects.get_for_model(Prefix), - "source_label": "Prefix", - "destination_type": ContentType.objects.get_for_model(VLAN), - "destination_label": "VLAN", - } - Relationship.objects.get_or_create(label=relationship_dict["label"], defaults=relationship_dict) + create_prefix_relationship() def test_cfs_have_correct_content_types_set(self): """Ensure cfs have correct content types configured.""" From 6d7c7606c164e3234ed21709a963538389867780 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 7 May 2024 16:15:49 -0500 Subject: [PATCH 019/229] =?UTF-8?q?test:=20=E2=9C=85=20Fix=20D42=20tests?= =?UTF-8?q?=20to=20ensure=20Site=20LocationType=20is=20configured=20proper?= =?UTF-8?q?ly.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../tests/device42/unit/test_models_nautobot_ipam.py | 4 +++- nautobot_ssot/tests/device42/unit/test_utils_nautobot.py | 7 +++++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/nautobot_ssot/tests/device42/unit/test_models_nautobot_ipam.py b/nautobot_ssot/tests/device42/unit/test_models_nautobot_ipam.py index f1e5a26e9..f5a39d73d 100644 --- a/nautobot_ssot/tests/device42/unit/test_models_nautobot_ipam.py +++ b/nautobot_ssot/tests/device42/unit/test_models_nautobot_ipam.py @@ -200,6 +200,7 @@ def setUp(self): status_reserved = Status.objects.get(name="Reserved") loc_type = LocationType.objects.get_or_create(name="Site")[0] loc_type.content_types.add(ContentType.objects.get_for_model(Device)) + loc_type.content_types.add(ContentType.objects.get_for_model(Prefix)) loc = Location.objects.get_or_create(name="Test Site", location_type=loc_type, status=self.status_active)[0] cisco_manu = Manufacturer.objects.get_or_create(name="Cisco")[0] csr1000v = DeviceType.objects.get_or_create(model="CSR1000v", manufacturer=cisco_manu)[0] @@ -420,8 +421,9 @@ def setUp(self): super().setUp() self.status_active = Status.objects.get(name="Active") - site_type = LocationType.objects.get(name="Site") + site_type = LocationType.objects.get_or_create(name="Site")[0] site_type.content_types.add(ContentType.objects.get_for_model(Device)) + site_type.content_types.add(ContentType.objects.get_for_model(VLAN)) self.test_site = Location.objects.create(name="HQ", location_type=site_type, status=self.status_active) self.diffsync = DiffSync() diff --git a/nautobot_ssot/tests/device42/unit/test_utils_nautobot.py b/nautobot_ssot/tests/device42/unit/test_utils_nautobot.py index 087fb16a7..1e8e667e1 100644 --- a/nautobot_ssot/tests/device42/unit/test_utils_nautobot.py +++ b/nautobot_ssot/tests/device42/unit/test_utils_nautobot.py @@ -28,10 +28,13 @@ def setUp(self): super().setUp() self.status_active = Status.objects.get(name="Active") self.cisco_manu, _ = Manufacturer.objects.get_or_create(name="Cisco") + site_lt = LocationType.objects.get_or_create(name="Site")[0] + site_lt.content_types.add(ContentType.objects.get_for_model(Device)) + site_lt.content_types.add(ContentType.objects.get_for_model(VLAN)) self.site = Location.objects.create( name="Test Site", status=self.status_active, - location_type=LocationType.objects.get(name="Site"), + location_type=site_lt, ) self.site.validated_save() _dt = DeviceType.objects.create(model="CSR1000v", manufacturer=self.cisco_manu) @@ -154,7 +157,7 @@ def test_update_custom_fields_add_cf(self): test_site = 
Location.objects.create( name="Test", location_type=LocationType.objects.get_or_create(name="Site")[0], status=self.status_active ) - self.assertEqual(len(test_site.get_custom_fields()), 4) + self.assertEqual(len(test_site.get_custom_fields()), 0) mock_cfs = { "Test Custom Field": {"key": "Test Custom Field", "value": None, "notes": None}, } From a32200d7c1ef594e192913035818f1180dc95c75 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 7 May 2024 16:16:02 -0500 Subject: [PATCH 020/229] =?UTF-8?q?docs:=20=F0=9F=93=9D=20Add=20changelog?= =?UTF-8?q?=20fragment?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- changes/234.fixed | 1 + 1 file changed, 1 insertion(+) create mode 100644 changes/234.fixed diff --git a/changes/234.fixed b/changes/234.fixed new file mode 100644 index 000000000..0fce00f92 --- /dev/null +++ b/changes/234.fixed @@ -0,0 +1 @@ +Fixed integration tests so they're no longer dependent upon being enabled in dev environment. \ No newline at end of file From e7f4bd2c60059abf43d31b9343550953a53e9d0b Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Mon, 13 May 2024 16:09:27 -0500 Subject: [PATCH 021/229] =?UTF-8?q?test:=20=F0=9F=90=9B=20Correct=20result?= =?UTF-8?q?=20in=20test=20to=20match=20update=20in=20netutils=20mapper.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/tests/device42/unit/test_utils_device42.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/tests/device42/unit/test_utils_device42.py b/nautobot_ssot/tests/device42/unit/test_utils_device42.py index 40ce4543a..5c00330a3 100644 --- a/nautobot_ssot/tests/device42/unit/test_utils_device42.py +++ b/nautobot_ssot/tests/device42/unit/test_utils_device42.py @@ -164,7 +164,7 @@ def test_get_intf_status(self, name, sent, received): # pylint: disable=unused- ("iosxe", "iosxe", "cisco_ios"), ("iosxr", "iosxr", "cisco_xr"), ("nxos", "nxos", "cisco_nxos"), - ("bigip", "f5", "f5_tmsh"), + ("bigip", "f5", "bigip_f5"), ("junos", "junos", "juniper_junos"), ("dell", "dell", "dell"), ] From 02716dd49dbb676393804b5b07d913f4877e1e28 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Mon, 13 May 2024 16:12:00 -0500 Subject: [PATCH 022/229] =?UTF-8?q?docs:=20=F0=9F=93=9D=20Add=20changelog?= =?UTF-8?q?=20fragment?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- changes/456.fixed | 1 + 1 file changed, 1 insertion(+) create mode 100644 changes/456.fixed diff --git a/changes/456.fixed b/changes/456.fixed new file mode 100644 index 000000000..4f31ce0e1 --- /dev/null +++ b/changes/456.fixed @@ -0,0 +1 @@ +Fix Device42 integration unit test that was expecting wrong BIG-IP netmiko platform name. 
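PATCH 021/229 and 022/229 above, together with PATCH 023/229 that follows, track a change in the upstream netutils mapper for BIG-IP: the expected netmiko slug moved from f5_tmsh to bigip_f5, and the bigip case is then dropped altogether so the test no longer chases a third-party mapping table. These tests are table-driven via parameterized.expand over (name, sent, expected) tuples. The sketch below shows that pattern with a hypothetical stand-in mapper — the real get_netmiko_platform helper and its table live in the Device42 integration's utils and are not reproduced here — and it assumes the parameterized package used elsewhere in this test suite:

    from unittest import TestCase, main

    from parameterized import parameterized

    # Hypothetical override table; a lookup miss falls back to "<vendor>_<os_name>".
    PLATFORM_OVERRIDES = {"iosxe": "cisco_ios", "iosxr": "cisco_xr", "nxos": "cisco_nxos"}


    def get_netmiko_platform(os_name, vendor):
        """Map an OS name to a netmiko platform slug (illustrative stand-in)."""
        return PLATFORM_OVERRIDES.get(os_name, f"{vendor}_{os_name}")


    class TestPlatformMapping(TestCase):
        """Table-driven test in the same style as test_utils_device42.py."""

        mapping_cases = [
            ("iosxe", ("iosxe", "cisco"), "cisco_ios"),
            ("nxos", ("nxos", "cisco"), "cisco_nxos"),
            ("junos", ("junos", "juniper"), "juniper_junos"),
        ]

        @parameterized.expand(mapping_cases)
        def test_get_netmiko_platform(self, name, sent, expected):  # pylint: disable=unused-argument
            self.assertEqual(get_netmiko_platform(*sent), expected)


    if __name__ == "__main__":
        main()

Removing the volatile case, as PATCH 023/229 does, keeps the suite green across netutils releases without weakening coverage of the stable mappings.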
From 93d08d1f9f6630c579e27d06712a9bcf5fe84e89 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 14 May 2024 07:38:29 -0500 Subject: [PATCH 023/229] =?UTF-8?q?test:=20=E2=9C=85=20Remove=20bigip=20fr?= =?UTF-8?q?om=20netmiko=20platform=20checks?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/tests/device42/unit/test_utils_device42.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nautobot_ssot/tests/device42/unit/test_utils_device42.py b/nautobot_ssot/tests/device42/unit/test_utils_device42.py index 5c00330a3..1bdd6476c 100644 --- a/nautobot_ssot/tests/device42/unit/test_utils_device42.py +++ b/nautobot_ssot/tests/device42/unit/test_utils_device42.py @@ -164,7 +164,6 @@ def test_get_intf_status(self, name, sent, received): # pylint: disable=unused- ("iosxe", "iosxe", "cisco_ios"), ("iosxr", "iosxr", "cisco_xr"), ("nxos", "nxos", "cisco_nxos"), - ("bigip", "f5", "bigip_f5"), ("junos", "junos", "juniper_junos"), ("dell", "dell", "dell"), ] From 77a187d75593c72fdedb0a502ae6c4b58b391e6d Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 29 Apr 2024 20:12:54 +0100 Subject: [PATCH 024/229] Add test filters. --- development/nautobot_config.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/development/nautobot_config.py b/development/nautobot_config.py index b33319dc8..a6c0b0773 100644 --- a/development/nautobot_config.py +++ b/development/nautobot_config.py @@ -232,6 +232,12 @@ "infoblox_verify_ssl": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_VERIFY_SSL", True)), "infoblox_wapi_version": os.getenv("NAUTOBOT_SSOT_INFOBLOX_WAPI_VERSION", "v2.12"), "infoblox_network_view": os.getenv("NAUTOBOT_SSOT_INFOBLOX_NETWORK_VIEW", ""), + "infoblox_sync_filters": [ + {"network_view": "default", "prefixes_ipv4": ["10.0.0.0/8"]}, + {"network_view": "dev", "prefixes_ipv4": ["10.0.0.0/8"]}, + {"network_view": "prod", "prefixes_ipv4": ["10.0.0.0/16"]}, + {"network_view": "test", "prefixes_ipv4": ["10.0.0.0/8"]}, + ], "ipfabric_api_token": os.getenv("NAUTOBOT_SSOT_IPFABRIC_API_TOKEN"), "ipfabric_host": os.getenv("NAUTOBOT_SSOT_IPFABRIC_HOST"), "ipfabric_ssl_verify": is_truthy(os.getenv("NAUTOBOT_SSOT_IPFABRIC_SSL_VERIFY", "False")), From ed6a5e9486d054e8d6d87bcd90c9bd738cbe0ef9 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 29 Apr 2024 20:13:26 +0100 Subject: [PATCH 025/229] Add sync filters config. --- nautobot_ssot/integrations/infoblox/constant.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nautobot_ssot/integrations/infoblox/constant.py b/nautobot_ssot/integrations/infoblox/constant.py index ba3f0950e..01ed95463 100644 --- a/nautobot_ssot/integrations/infoblox/constant.py +++ b/nautobot_ssot/integrations/infoblox/constant.py @@ -26,6 +26,7 @@ def _read_app_config(): }, "infoblox_import_subnets": config.get("infoblox_import_subnets"), "infoblox_request_timeout": int(config.get("infoblox_request_timeout", 60)), + "infoblox_sync_filters": config.get("infoblox_sync_filters"), } From 1c60cb3b0beb6f1f072889c42787abe6c5ff3e02 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 29 Apr 2024 20:14:55 +0100 Subject: [PATCH 026/229] Add network view and loader filtering support. 
--- .../infoblox/diffsync/adapters/infoblox.py | 218 ++++++++++++++---- 1 file changed, 172 insertions(+), 46 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py index c47d234d6..36e4c599a 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py @@ -1,31 +1,39 @@ """Infoblox Adapter for Infoblox integration with SSoT app.""" import re +from typing import Optional from diffsync import DiffSync from diffsync.enum import DiffSyncFlags from diffsync.exceptions import ObjectAlreadyExists from nautobot.extras.plugins.exceptions import PluginImproperlyConfigured + from nautobot_ssot.integrations.infoblox.constant import PLUGIN_CFG -from nautobot_ssot.integrations.infoblox.utils.client import get_default_ext_attrs, get_dns_name -from nautobot_ssot.integrations.infoblox.utils.diffsync import get_ext_attr_dict, build_vlan_map from nautobot_ssot.integrations.infoblox.diffsync.models.infoblox import ( InfobloxIPAddress, + InfobloxNamespace, InfobloxNetwork, - InfobloxVLANView, InfobloxVLAN, + InfobloxVLANView, +) +from nautobot_ssot.integrations.infoblox.utils.client import get_default_ext_attrs, get_dns_name +from nautobot_ssot.integrations.infoblox.utils.diffsync import ( + build_vlan_map, + get_ext_attr_dict, + map_network_view_to_namespace, ) class InfobloxAdapter(DiffSync): """DiffSync adapter using requests to communicate to Infoblox server.""" + namespace = InfobloxNamespace prefix = InfobloxNetwork ipaddress = InfobloxIPAddress vlangroup = InfobloxVLANView vlan = InfobloxVLAN - top_level = ["vlangroup", "vlan", "prefix", "ipaddress"] + top_level = ["namespace", "vlangroup", "vlan", "prefix", "ipaddress"] def __init__(self, *args, job=None, sync=None, conn, **kwargs): """Initialize Infoblox. @@ -47,43 +55,154 @@ def __init__(self, *args, job=None, sync=None, conn, **kwargs): ) raise PluginImproperlyConfigured - def load_prefixes(self): - """Load InfobloxNetwork DiffSync model.""" - if PLUGIN_CFG.get("infoblox_import_subnets"): - if self.job.debug: - self.job.logger.debug("Loading Subnets from Infoblox.") - subnets = [] - containers = [] - for prefix in PLUGIN_CFG["infoblox_import_subnets"]: - # Get all child containers and subnets - tree = self.conn.get_tree_from_container(prefix) - containers.extend(tree) - - # Need to check if the container has children. 
If it does, we need to get all subnets from the children - # If it doesn't, we can just get all subnets from the container - if tree: - for subnet in tree: - subnets.extend(self.conn.get_child_subnets_from_container(prefix=subnet["network"])) - else: - subnets.extend(self.conn.get_all_subnets(prefix=prefix)) - - # Remove duplicates if a child subnet is included infoblox_import_subnets config + def load_network_views(self, sync_filters): + """Load Namespace DiffSync model.""" + network_view_filters = {sf["network_view"] for sf in sync_filters if "network_view" in sf} + networkviews = self.conn.get_network_views() + default_ext_attrs = get_default_ext_attrs(review_list=networkviews) + # TODO: Remove after development is done @progala + self.job.logger.info(f"NVFilters: {network_view_filters}, NetworkViews: {networkviews}") + for _nv in networkviews: + # Do not load Network Views not present in the sync filters + if _nv["name"] not in network_view_filters: + continue + namespace_name = map_network_view_to_namespace(_nv["name"]) + networkview_ext_attrs = get_ext_attr_dict(extattrs=_nv.get("extattrs", {})) + new_namespace = self.namespace( + name=namespace_name, + ext_attrs={**default_ext_attrs, **networkview_ext_attrs}, + ) + self.add(new_namespace) + + def _load_prefixes_filtered(self, sync_filter: dict, ip_version: str = "ipv4"): + """Loads prefixes from Infoblox based on the provided sync filter. + + Args: + sync_filter (dict): Sync filter containing sync rules + ip_version (str): IP version of prefixes, either "ipv4" or "ipv6" + + Returns: + (tuple): Tuple consisting of list of container prefixes and a list of subnet prefixes + """ + containers = [] + subnets = [] + network_view = None + if "network_view" in sync_filter: + network_view = sync_filter["network_view"] + + prefix_filter_attr = f"prefixes_{ip_version}" + + for prefix in sync_filter[prefix_filter_attr]: + tree = self.conn.get_tree_from_container(root_container=prefix, network_view=network_view) + containers.extend(tree) + # Need to check if the container has children. If it does, we need to get all subnets from the children + # If it doesn't, we can just get all subnets from the container + if tree: + for subnet in tree: + subnets.extend( + self.conn.get_child_subnets_from_container(prefix=subnet["network"], network_view=network_view) + ) + else: + subnets.extend(self.conn.get_all_subnets(prefix=prefix, network_view=network_view)) + + # We need to remove duplicate prefixes if Network View support is not enabled + if not network_view: + containers = self.conn.remove_duplicates(containers) subnets = self.conn.remove_duplicates(subnets) - all_networks = self.conn.remove_duplicates(containers) + subnets - else: - # Need to load containers here to prevent duplicates when syncing back to Infoblox - containers = self.conn.get_network_containers() - subnets = self.conn.get_all_subnets() - if PLUGIN_CFG.get("infoblox_import_objects_subnets_ipv6"): - containers += self.conn.get_network_containers(ipv6=True) - subnets += self.conn.get_all_subnets(ipv6=True) - all_networks = containers + subnets + + return containers, subnets + + def _load_all_prefixes_filtered(self, sync_filters: list, include_ipv4: bool, include_ipv6: bool): + """Loads all of the Infoblox prefixes based on the sync filter rules. 
+ + Args: + sync_filters (list): List of dicts, each dict is a single sync filter definition + include_ipv4 (bool): Whether to include IPv4 prefixes + include_ipv6 (bool): Whether to include IPv6 prefixes + + Returns: + (tuple): Tuple consisting of list of container prefixes and a list of subnet prefixes + """ + all_containers = [] + all_subnets = [] + for sync_filter in sync_filters: + pfx_filter_ipv4 = "prefixes_ipv4" in sync_filter + pfx_filter_ipv6 = "prefixes_ipv6" in sync_filter + if pfx_filter_ipv4 and include_ipv4: + containers, subnets = self._load_prefixes_filtered(sync_filter=sync_filter, ip_version="ipv4") + all_containers.extend(containers) + all_subnets.extend(subnets) + if pfx_filter_ipv6 and include_ipv6: + containers, subnets = self._load_prefixes_filtered(sync_filter=sync_filter, ip_version="ipv6") + all_subnets.extend(subnets) + all_containers.extend(containers) + # Mimic default behavior of `infoblox_network_view` setting + if "network_view" in sync_filter and not (pfx_filter_ipv4 or pfx_filter_ipv6): + network_view = sync_filter["network_view"] + if include_ipv4: + all_containers.extend(self.conn.get_network_containers(network_view=network_view)) + all_subnets.extend(self.conn.get_all_subnets(network_view=network_view)) + if include_ipv6: + all_containers.extend(self.conn.get_network_containers(network_view=network_view, ipv6=True)) + all_subnets.extend(self.conn.get_all_subnets(network_view=network_view, ipv6=True)) + + return all_containers, all_subnets + + def _load_all_prefixes_unfiltered(self, include_ipv4: bool, include_ipv6: bool): + """Loads all prefixes from Infoblox. Removes duplicates, if same prefix is found in different network views. + + Args: + include_ipv4: Whether to include IPv4 prefixes + include_ipv6: Whether to include IPv6 prefixes + + Returns: + (tuple): Tuple consisting of list of container prefixes and a list of subnet prefixes + """ + containers = [] + subnets = [] + if include_ipv4: + containers.extend(self.conn.get_network_containers()) + subnets.extend(self.conn.get_all_subnets()) + if include_ipv6: + containers.extend(self.conn.get_network_containers(ipv6=True)) + subnets.extend(self.conn.get_all_subnets(ipv6=True)) + + containers = self.conn.remove_duplicates(containers) + subnets = self.conn.remove_duplicates(subnets) + + return containers, subnets + + def load_prefixes(self, include_ipv4: bool, include_ipv6: bool, sync_filters: Optional[list] = None): + """Load InfobloxNetwork DiffSync model.""" + # TODO: Need to align it with the new filter configuration, @progala + legacy_sync_filter = {} + if PLUGIN_CFG["NAUTOBOT_INFOBLOX_NETWORK_VIEW"]: + legacy_sync_filter["network_view"] = PLUGIN_CFG["NAUTOBOT_INFOBLOX_NETWORK_VIEW"] + if PLUGIN_CFG["infoblox_import_subnets"]: + legacy_sync_filter["prefixes_ipv4"] = PLUGIN_CFG["infoblox_import_subnets"] + # TODO: Validate there's no overlap between legacy_sync_filters and sync_filter + # Alternatively, refuse to accept sync_filters if old flags are in place @progala + sync_filters = PLUGIN_CFG["infoblox_sync_filters"] + # TODO: Remove after development is done @progala + self.job.logger.info(f"sync_filters: {sync_filters}") + + if not sync_filters: + containers, subnets = self._load_all_prefixes_unfiltered( + include_ipv4=include_ipv4, include_ipv6=include_ipv6 + ) + elif sync_filters: + containers, subnets = self._load_all_prefixes_filtered( + sync_filters, include_ipv4=include_ipv4, include_ipv6=include_ipv6 + ) + + all_networks = containers + subnets self.subnets = [(x["network"], 
x["network_view"]) for x in subnets] default_ext_attrs = get_default_ext_attrs(review_list=all_networks) for _pf in all_networks: pf_ext_attrs = get_ext_attr_dict(extattrs=_pf.get("extattrs", {})) new_pf = self.prefix( network=_pf["network"], + namespace=map_network_view_to_namespace(_pf["network_view"]), description=_pf.get("comment", ""), network_type="network" if _pf in subnets else "container", ext_attrs={**default_ext_attrs, **pf_ext_attrs}, @@ -112,17 +231,18 @@ def load_ipaddresses(self): dns_name = "" if _ip["names"]: dns_name = get_dns_name(possible_fqdn=_ip["names"][0]) + namespace = map_network_view_to_namespace(_ip["network_view"]) ip_ext_attrs = get_ext_attr_dict(extattrs=_ip.get("extattrs", {})) - _, loaded = self.get_or_instantiate( - self.ipaddress, - ids={"address": _ip["ip_address"], "prefix": _ip["network"], "prefix_length": prefix_length}, - attrs={ - "dns_name": dns_name, - "status": self.conn.get_ipaddr_status(_ip), - "ip_addr_type": self.conn.get_ipaddr_type(_ip), - "description": _ip["comment"], - "ext_attrs": {**default_ext_attrs, **ip_ext_attrs}, - }, + new_ip = self.ipaddress( + address=_ip["ip_address"], + prefix=_ip["network"], + prefix_length=prefix_length, + namespace=namespace, + dns_name=dns_name, + status=self.conn.get_ipaddr_status(_ip), + ip_addr_type=self.conn.get_ipaddr_type(_ip), + description=_ip["comment"], + ext_attrs={**default_ext_attrs, **ip_ext_attrs}, ) if not loaded: self.job.logger.warning( @@ -165,9 +285,15 @@ def load_vlans(self): def load(self): """Load all models by calling other methods.""" + # Set ipv4 import to True as default + include_ipv4 = True + sync_filters = PLUGIN_CFG["infoblox_sync_filters"] if "infoblox_import_objects" in PLUGIN_CFG: + # Use config setting to decide whether to import ipv6 + include_ipv6 = PLUGIN_CFG["infoblox_import_objects"].get("subnets_ipv6") + self.load_network_views(include_ipv4=include_ipv4, include_ipv6=include_ipv6, sync_filters=sync_filters) if PLUGIN_CFG["infoblox_import_objects"].get("subnets"): - self.load_prefixes() + self.load_prefixes(sync_filters=sync_filters) if PLUGIN_CFG["infoblox_import_objects"].get("ip_addresses"): self.load_ipaddresses() if PLUGIN_CFG["infoblox_import_objects"].get("vlan_views"): @@ -180,7 +306,7 @@ def load(self): self.load_ipaddresses() self.load_vlanviews() self.load_vlans() - for obj in ["prefix", "ipaddress", "vlangroup", "vlan"]: + for obj in ["prefix", "ipaddress", "vlangroup", "vlan", "namespace"]: if obj in self.dict(): self.job.logger.info(f"Loaded {len(self.dict()[obj])} {obj} from Infoblox.") From c76a2e572fb5a7a9c3e755636ff6ab238f8ac4c0 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 29 Apr 2024 20:15:24 +0100 Subject: [PATCH 027/229] Add Nautobot namespace and loader filtering support. 
--- .../infoblox/diffsync/adapters/nautobot.py | 153 ++++++++++++++++-- 1 file changed, 137 insertions(+), 16 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py index 682c40af2..e09613d67 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py @@ -2,24 +2,29 @@ # pylint: disable=duplicate-code import datetime +from typing import Optional + from diffsync import DiffSync from diffsync.exceptions import ObjectAlreadyExists, ObjectNotFound from django.contrib.contenttypes.models import ContentType from nautobot.dcim.models import Location from nautobot.extras.choices import CustomFieldTypeChoices -from nautobot.extras.models import Relationship, Role, Status, Tag, CustomField -from nautobot.ipam.models import IPAddress, Prefix, VLAN, VLANGroup +from nautobot.extras.models import CustomField, Relationship, Role, Status, Tag +from nautobot.ipam.models import VLAN, IPAddress, Namespace, Prefix, VLANGroup from nautobot.tenancy.models import Tenant + +from nautobot_ssot.integrations.infoblox.constant import PLUGIN_CFG, TAG_COLOR from nautobot_ssot.integrations.infoblox.diffsync.models import ( - NautobotNetwork, NautobotIPAddress, - NautobotVlanGroup, + NautobotNamespace, + NautobotNetwork, NautobotVlan, + NautobotVlanGroup, ) -from nautobot_ssot.integrations.infoblox.constant import TAG_COLOR from nautobot_ssot.integrations.infoblox.utils.diffsync import ( - nautobot_vlan_status, get_default_custom_fields, + map_network_view_to_namespace, + nautobot_vlan_status, ) from nautobot_ssot.integrations.infoblox.utils.nautobot import build_vlan_map_from_relations, get_prefix_vlans @@ -89,18 +94,20 @@ def _tag_object(nautobot_object): class NautobotAdapter(NautobotMixin, DiffSync): # pylint: disable=too-many-instance-attributes """DiffSync adapter using ORM to communicate to Nautobot.""" + namespace = NautobotNamespace prefix = NautobotNetwork ipaddress = NautobotIPAddress vlangroup = NautobotVlanGroup vlan = NautobotVlan - top_level = ["vlangroup", "vlan", "prefix", "ipaddress"] + top_level = ["namespace", "vlangroup", "vlan", "prefix", "ipaddress"] status_map = {} location_map = {} relationship_map = {} tenant_map = {} vrf_map = {} + namespace_map = {} prefix_map = {} role_map = {} ipaddr_map = {} @@ -126,17 +133,88 @@ def sync_complete(self, source: DiffSync, *args, **kwargs): """ super().sync_complete(source, *args, **kwargs) - def load_prefixes(self): + def _get_namespaces_from_sync_filters(self, sync_filters: list) -> set: + """Get namespaces defined in filters.""" + namespaces = set() + for sync_filter in sync_filters: + if "network_view" not in sync_filter: + continue + namespace_name = map_network_view_to_namespace(sync_filter["network_view"]) + namespaces.add(namespace_name) + + return namespaces + + def load_namespaces(self, sync_filters: Optional[list] = None): + """Load Namespace DiffSync model.""" + namespace_names = None + if sync_filters: + namespace_names = self._get_namespaces_from_sync_filters(sync_filters) + if namespace_names: + all_namespaces = Namespace.objects.filter(name__in=namespace_names) + else: + all_namespaces = Namespace.objects.all() + + default_cfs = get_default_custom_fields(cf_contenttype=ContentType.objects.get_for_model(Namespace)) + for namespace in all_namespaces: + self.namespace_map[namespace.name] = namespace.id + _namespace = self.namespace( + 
name=namespace.name,
+                ext_attrs={**default_cfs, **namespace.custom_field_data},
+                pk=namespace.id,
+            )
+            try:
+                self.add(_namespace)
+            except ObjectAlreadyExists:
+                self.job.logger.warning(f"Found duplicate namespace: {namespace.name}.")
+
+    def _load_all_prefixes_filtered(self, sync_filters: list, include_ipv4: bool, include_ipv6: bool):
+        """Loads prefixes from Nautobot based on the provided sync filters.
+
+        Args:
+            sync_filters (list): List of dicts, each dict is a single sync filter definition
+            include_ipv4 (bool): Whether to include IPv4 prefixes
+            include_ipv6 (bool): Whether to include IPv6 prefixes
+
+        Returns:
+            (PrefixQuerySet): PrefixQuerySet with prefixes
+        """
+        all_prefixes = Prefix.objects.none()
+        for sync_filter in sync_filters:
+            query_filters = {}
+            if "network_view" in sync_filter:
+                query_filters["namespace__name"] = sync_filter["network_view"]
+            if "prefixes_ipv4" in sync_filter and include_ipv4:
+                for pfx_ipv4 in sync_filter["prefixes_ipv4"]:
+                    query_filters["network__net_contained_or_equal"] = pfx_ipv4
+                    all_prefixes = all_prefixes.union(Prefix.objects.filter(**query_filters))
+            if "prefixes_ipv6" in sync_filter and include_ipv6:
+                for pfx_ipv6 in sync_filter["prefixes_ipv6"]:
+                    query_filters["network__net_contained_or_equal"] = pfx_ipv6
+                    all_prefixes = all_prefixes.union(Prefix.objects.filter(**query_filters))
+            # Filter on namespace name only
+            if "prefixes_ipv4" not in sync_filter and "prefixes_ipv6" not in sync_filter:
+                all_prefixes = all_prefixes.union(Prefix.objects.filter(**query_filters))
+
+        return all_prefixes
+
+    def load_prefixes(self, include_ipv4: bool, include_ipv6: bool, sync_filters: Optional[list]):
         """Load Prefixes from Nautobot."""
-        all_prefixes = Prefix.objects.all()
+        if not sync_filters:
+            all_prefixes = Prefix.objects.all()
+        else:
+            all_prefixes = self._load_all_prefixes_filtered(
+                sync_filters=sync_filters, include_ipv4=include_ipv4, include_ipv6=include_ipv6
+            )
+
         default_cfs = get_default_custom_fields(cf_contenttype=ContentType.objects.get_for_model(Prefix))
         for prefix in all_prefixes:
-            self.prefix_map[str(prefix.prefix)] = prefix.id
+            self.prefix_map[(prefix.namespace.name, str(prefix.prefix))] = prefix.id
             if "ssot_synced_to_infoblox" in prefix.custom_field_data:
                 prefix.custom_field_data.pop("ssot_synced_to_infoblox")
             current_vlans = get_prefix_vlans(prefix=prefix)
             _prefix = self.prefix(
                 network=str(prefix.prefix),
+                namespace=prefix.namespace.name,
                 description=prefix.description,
                 network_type=prefix.type,
                 ext_attrs={**default_cfs, **prefix.custom_field_data},
@@ -151,19 +229,55 @@ def load_prefixes(self):
             except ObjectAlreadyExists:
                 self.job.logger.warning(f"Found duplicate prefix: {prefix.prefix}.")
 
-    def load_ipaddresses(self):
+    def _load_all_ipaddresses_filtered(self, sync_filters: list, include_ipv4: bool, include_ipv6: bool):
+        """Loads ip addresses from Nautobot based on the provided sync filters.
+
+        Args:
+            sync_filters (list): List of dicts, each dict is a single sync filter definition
+            include_ipv4 (bool): Whether to include IPv4 addresses
+            include_ipv6 (bool): Whether to include IPv6 addresses
+
+        Returns:
+            (IPAddressQuerySet): IPAddressQuerySet with ip addresses
+        """
+        all_ipaddresses = IPAddress.objects.none()
+        for sync_filter in sync_filters:
+            query_filters = {}
+            if "network_view" in sync_filter:
+                query_filters["parent__namespace__name"] = sync_filter["network_view"]
+            if "prefixes_ipv4" in sync_filter and include_ipv4:
+                query_filters["host__net_in"] = sync_filter["prefixes_ipv4"]
+                all_ipaddresses = all_ipaddresses.union(IPAddress.objects.filter(**query_filters))
+            if "prefixes_ipv6" in sync_filter and include_ipv6:
+                query_filters["host__net_in"] = sync_filter["prefixes_ipv6"]
+                all_ipaddresses = all_ipaddresses.union(IPAddress.objects.filter(**query_filters))
+            # Filter on namespace name only
+            if "prefixes_ipv4" not in sync_filter and "prefixes_ipv6" not in sync_filter:
+                all_ipaddresses = all_ipaddresses.union(IPAddress.objects.filter(**query_filters))
+
+        return all_ipaddresses
+
+    def load_ipaddresses(self, include_ipv4: bool, include_ipv6: bool, sync_filters: list):
         """Load IP Addresses from Nautobot."""
         default_cfs = get_default_custom_fields(cf_contenttype=ContentType.objects.get_for_model(IPAddress))
-        for ipaddr in IPAddress.objects.all():
+        if not sync_filters:
+            all_ipaddresses = IPAddress.objects.all()
+        else:
+            all_ipaddresses = self._load_all_ipaddresses_filtered(
+                sync_filters=sync_filters, include_ipv4=include_ipv4, include_ipv6=include_ipv6
+            )
+        for ipaddr in all_ipaddresses:
             self.ipaddr_map[str(ipaddr.address)] = ipaddr.id
             addr = ipaddr.host
-            # the last Prefix is the most specific and is assumed the one the IP address resides in
-            prefix = Prefix.objects.net_contains(addr).last()
+            prefix = ipaddr.parent  # The IP address must have a parent prefix
+            # Note: In Nautobot 2.0 IP Address *must* have a parent prefix so this should not happen
             if not prefix:
                 self.job.logger.warning(f"IP Address {addr} does not have a parent prefix and will not be synced.")
+                self.ipaddr_map[(str(ipaddr.address), "Global")] = ipaddr.id
                 continue
+            self.ipaddr_map[(str(ipaddr.address), prefix.namespace.name)] = ipaddr.id
             # IP address must be part of a prefix that is not a container
             # This means the IP cannot be associated with an IPv4 Network within Infoblox
             if prefix.type == "container":
@@ -177,6 +291,7 @@ def load_ipaddresses(self):
             _ip = self.ipaddress(
                 address=addr,
                 prefix=str(prefix),
+                namespace=prefix.namespace.name,
                 status=ipaddr.status.name if ipaddr.status else None,
                 ip_addr_type=ipaddr.type,
                 prefix_length=prefix.prefix_length if prefix else ipaddr.prefix_length,
@@ -229,15 +344,21 @@ def load_vlans(self):
 
     def load(self):
         """Load models with data from Nautobot."""
+        include_ipv4 = True
+        include_ipv6 = PLUGIN_CFG.get("infoblox_import_objects", {}).get("subnets_ipv6", False)
+        sync_filters = PLUGIN_CFG.get("infoblox_sync_filters", [])
         self.relationship_map = {r.label: r.id for r in Relationship.objects.only("id", "label")}
         self.status_map = {s.name: s.id for s in Status.objects.only("id", "name")}
         self.location_map = {loc.name: loc.id for loc in Location.objects.only("id", "name")}
         self.tenant_map = {t.name: t.id for t in Tenant.objects.only("id", "name")}
         self.role_map = {r.name: r.id for r in Role.objects.only("id", "name")}
-        self.load_prefixes()
+        self.load_namespaces(sync_filters=sync_filters)
+        if "namespace" in self.dict():
+            self.job.logger.info(f"Loaded {len(self.dict()['namespace'])} Namespaces from Nautobot.")
+
+        
self.load_prefixes(sync_filters=sync_filters, include_ipv4=include_ipv4, include_ipv6=include_ipv6) if "prefix" in self.dict(): self.job.logger.info(f"Loaded {len(self.dict()['prefix'])} prefixes from Nautobot.") - self.load_ipaddresses() + self.load_ipaddresses(sync_filters=sync_filters, include_ipv4=include_ipv4, include_ipv6=include_ipv6) if "ipaddress" in self.dict(): self.job.logger.info(f"Loaded {len(self.dict()['ipaddress'])} IP addresses from Nautobot.") self.load_vlangroups() From 0c65d25acbb3b41038f66f1060befa589b6bf6ad Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 29 Apr 2024 20:16:03 +0100 Subject: [PATCH 028/229] Add Namespace model and namespace/network view support. --- .../infoblox/diffsync/models/base.py | 18 ++++++++- .../infoblox/diffsync/models/infoblox.py | 8 +++- .../infoblox/diffsync/models/nautobot.py | 40 +++++++++++++++---- 3 files changed, 56 insertions(+), 10 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/base.py b/nautobot_ssot/integrations/infoblox/diffsync/models/base.py index 1d3f42cc1..7579f270e 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/base.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/base.py @@ -5,14 +5,27 @@ from diffsync import DiffSyncModel +class Namespace(DiffSyncModel): + """Namespace model for DiffSync.""" + + _modelname = "namespace" + _identifiers = ("name",) + _attributes = ("ext_attrs",) + + name: str + ext_attrs: Optional[dict] + pk: Optional[uuid.UUID] = None + + class Network(DiffSyncModel): """Network model for DiffSync.""" _modelname = "prefix" - _identifiers = ("network",) + _identifiers = ("network", "namespace") _attributes = ("description", "network_type", "ext_attrs", "vlans", "ranges") network: str + namespace: str description: Optional[str] network_type: Optional[str] ext_attrs: Optional[dict] @@ -54,13 +67,14 @@ class IPAddress(DiffSyncModel): """IPAddress model for DiffSync.""" _modelname = "ipaddress" - _identifiers = ("address", "prefix", "prefix_length") + _identifiers = ("address", "prefix", "prefix_length", "namespace") _attributes = ("description", "dns_name", "status", "ip_addr_type", "ext_attrs") address: str dns_name: str prefix: str prefix_length: int + namespace: str status: Optional[str] ip_addr_type: Optional[str] description: Optional[str] diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py index 1613488cc..5e036cefb 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py @@ -1,7 +1,7 @@ """Infoblox Models for Infoblox integration with SSoT app.""" from requests.exceptions import HTTPError -from nautobot_ssot.integrations.infoblox.diffsync.models.base import Network, IPAddress, Vlan, VlanView +from nautobot_ssot.integrations.infoblox.diffsync.models.base import Namespace, Network, IPAddress, Vlan, VlanView class InfobloxNetwork(Network): @@ -104,3 +104,9 @@ def update(self, attrs): # """Delete an IP Address from Infoblox.""" # self.diffsync.conn.delete_host_record(self.get_identifiers()["address"]) # return super().delete() + + +class InfobloxNamespace(Namespace): + """Infoblox implementation of the Namespace model.""" + + # Currently there are no plans to modify Network Views in Infoblox diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py b/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py index adb57fe5d..d0e5454c0 
100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py @@ -11,9 +11,13 @@ from nautobot.ipam.models import Prefix as OrmPrefix from nautobot.ipam.models import VLAN as OrmVlan from nautobot.ipam.models import VLANGroup as OrmVlanGroup +from nautobot.ipam.models import Namespace as OrmNamespace from nautobot_ssot.integrations.infoblox.constant import PLUGIN_CFG -from nautobot_ssot.integrations.infoblox.diffsync.models.base import Network, IPAddress, Vlan, VlanView -from nautobot_ssot.integrations.infoblox.utils.diffsync import create_tag_sync_from_infoblox +from nautobot_ssot.integrations.infoblox.diffsync.models.base import Namespace, Network, IPAddress, Vlan, VlanView +from nautobot_ssot.integrations.infoblox.utils.diffsync import ( + create_tag_sync_from_infoblox, + map_network_view_to_namespace, +) from nautobot_ssot.integrations.infoblox.utils.nautobot import get_prefix_vlans @@ -101,16 +105,20 @@ class NautobotNetwork(Network): @classmethod def create(cls, diffsync, ids, attrs): """Create Prefix object in Nautobot.""" + # Remap "default" Network View to "Global" Namespace + namespace_name = map_network_view_to_namespace(ids["namespace"]) _prefix = OrmPrefix( prefix=ids["network"], status_id=diffsync.status_map["Active"], type=attrs["network_type"], description=attrs.get("description", ""), + namespace_id=diffsync.namespace_map[namespace_name], ) prefix_ranges = attrs.get("ranges") if prefix_ranges: _prefix.cf["dhcp_ranges"] = ",".join(prefix_ranges) - if attrs.get("vlans"): + # Only attempt associating to VLANs if they were actually loaded + if attrs.get("vlans") and diffsync.vlan_map: relation = diffsync.relationship_map["Prefix -> VLAN"] for _, _vlan in attrs["vlans"].items(): index = 0 @@ -136,7 +144,7 @@ def create(cls, diffsync, ids, attrs): process_ext_attrs(diffsync=diffsync, obj=_prefix, extattrs=attrs["ext_attrs"]) _prefix.tags.add(create_tag_sync_from_infoblox()) _prefix.validated_save() - diffsync.prefix_map[ids["network"]] = _prefix.id + diffsync.prefix_map[(ids["namespace"], ids["network"])] = _prefix.id return super().create(ids=ids, diffsync=diffsync, attrs=attrs) def update(self, attrs): # pylint: disable=too-many-branches @@ -153,7 +161,8 @@ def update(self, attrs): # pylint: disable=too-many-branches prefix_ranges = attrs.get("ranges") if prefix_ranges: _pf.cf["dhcp_ranges"] = ",".join(prefix_ranges) - if "vlans" in attrs: # pylint: disable=too-many-nested-blocks + # Only attempt associating to VLANs if they were actually loaded + if "vlans" in attrs and self.diffsync.vlan_map: # pylint: disable=too-many-nested-blocks current_vlans = get_prefix_vlans(prefix=_pf) if len(current_vlans) < len(attrs["vlans"]): for _, item in attrs["vlans"].items(): @@ -224,18 +233,19 @@ def create(cls, diffsync, ids, attrs): type=ip_addr_type, description=attrs.get("description", ""), dns_name=attrs.get("dns_name", ""), - parent_id=diffsync.prefix_map[ids["prefix"]], + parent_id=diffsync.prefix_map[(ids["namespace"], ids["prefix"])], ) if attrs.get("ext_attrs"): process_ext_attrs(diffsync=diffsync, obj=_ip, extattrs=attrs["ext_attrs"]) try: _ip.tags.add(create_tag_sync_from_infoblox()) _ip.validated_save() + diffsync.ipaddr_map[(_ip.address, ids["namespace"])] = _ip.id diffsync.ipaddr_map[_ip.address] = _ip.id return super().create(ids=ids, diffsync=diffsync, attrs=attrs) except ValidationError as err: diffsync.job.logger.warning( - f"Error with validating IP Address 
{ids['address']}/{ids['prefix_length']}. {err}" + f"Error with validating IP Address {ids['address']}/{ids['prefix_length']}-{ids['namespace']}. {err}" ) return None @@ -366,3 +376,19 @@ def delete(self): _vlan = OrmVlan.objects.get(id=self.pk) _vlan.delete() return super().delete() + + +class NautobotNamespace(Namespace): + """Nautobot implementation of the Namespace model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Namespace object in Nautobot.""" + _ns = OrmNamespace( + name=ids["name"], + ) + if attrs.get("ext_attrs"): + process_ext_attrs(diffsync=diffsync, obj=_ns, extattrs=attrs["ext_attrs"]) + _ns.validated_save() + diffsync.namespace_map[ids["name"]] = _ns.id + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) From 1e87f5fb7ba434eaead7afae00ec3b4ce8d95065 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 29 Apr 2024 20:16:35 +0100 Subject: [PATCH 029/229] Add network view support to Infoblox client. --- .../integrations/infoblox/utils/client.py | 468 +++++++++++++----- 1 file changed, 348 insertions(+), 120 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/utils/client.py b/nautobot_ssot/integrations/infoblox/utils/client.py index bae55d40a..6886921bc 100644 --- a/nautobot_ssot/integrations/infoblox/utils/client.py +++ b/nautobot_ssot/integrations/infoblox/utils/client.py @@ -6,6 +6,7 @@ import ipaddress import logging import re +from typing import Optional import urllib.parse from collections import defaultdict from typing import Optional @@ -54,7 +55,7 @@ def get_default_ext_attrs(review_list: list) -> dict: def get_dns_name(possible_fqdn: str) -> str: - """Validate passed FQDN and returns if found. + """Validates passed FQDN and returns if found. Args: possible_fqdn (str): Potential string to be used for IP Address dns_name. @@ -185,14 +186,14 @@ def _delete(self, resource): """ response = self._request("DELETE", resource) try: - logger.info(response.json()) + logger.debug(response.json()) return response.json() except json.decoder.JSONDecodeError: logger.info(response.text) return response.text def _update(self, resource, **params): - """Delete a resource from Infoblox. + """Update a resource in Infoblox. Args: resource (str): Resource to update @@ -206,17 +207,20 @@ def _update(self, resource, **params): """ response = self._request("PUT", path=resource, params=params) try: - logger.info(response.json()) + logger.debug(response.json()) return response.json() except json.decoder.JSONDecodeError: logger.info(response.text) return response.text - def _get_network_ref(self, prefix): # pylint: disable=inconsistent-return-statements + def _get_network_ref( + self, prefix, network_view: Optional[str] = None + ): # pylint: disable=inconsistent-return-statements """Fetch the _ref of a prefix resource. Args: prefix (str): IPv4 Prefix to fetch the _ref for. + network_view (str): Network View of the prefix to fetch the _ref for. 
Returns:
             (str) network _ref or None
 
         Returns Response:
             "network/ZG5zLm5ldHdvcmskMTkyLjAuMi4wLzI0LzA:192.0.2.0/24/default"
         """
-        for item in self.get_all_subnets(prefix):
-            if item["network"] == prefix:
-                return item["_ref"]
+        url_path = "network"
+        params = {"network": prefix, "_return_as_object": 1}
+        if network_view:
+            params["network_view"] = network_view
+        response = self._request("GET", url_path, params=params)
+        try:
+            logger.debug(response.json())
+            results = response.json().get("result")
+        except json.decoder.JSONDecodeError:
+            logger.info(response.text)
+            return response.text
+        if results and len(results):
+            return results[0].get("_ref")
+        return None
 
-    def _get_network_container_ref(self, prefix):  # pylint: disable=inconsistent-return-statements
+    def _get_network_container_ref(
+        self, prefix, network_view: Optional[str] = None
+    ):  # pylint: disable=inconsistent-return-statements
         """Fetch the _ref of a networkcontainer resource.
 
         Args:
             prefix (str): IPv4 Prefix to fetch the _ref for.
+            network_view (str): Network View of the prefix to fetch the _ref for.
 
         Returns:
             (str) networkcontainer _ref or None
 
         Returns Response:
             "networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDE5Mi4xNjguMi4wLzI0LzA:192.168.2.0/24/default"
         """
-        for item in self.get_network_containers():
-            if item["network"] == prefix:
-                return item["_ref"]
+        url_path = "networkcontainer"
+        params = {"network": prefix, "_return_as_object": 1}
+        if network_view:
+            params["network_view"] = network_view
+        response = self._request("GET", url_path, params=params)
+        try:
+            logger.debug(response.json())
+            results = response.json().get("result")
+        except json.decoder.JSONDecodeError:
+            logger.info(response.text)
+            return response.text
+        if results and len(results):
+            return results[0].get("_ref")
+        return None
 
     def get_all_ipv4address_networks(self, prefixes):
-        """Get all used / unused IPv4 addresses within the supplied network.
+        """Get all used / unused IPv4 addresses within the supplied networks.
 
         Args:
             prefixes (List[tuple]): List of Network prefixes and associated network view - ('10.220.0.0/22', 'default')
@@ -344,7 +373,7 @@ def create_payload(prefix: str, view: str) -> dict:
                 "object": "ipv4address",
                 "data": {"network_view": view, "network": prefix, "status": "USED"},
                 "args": {
-                    "_return_fields": "ip_address,mac_address,names,network,objects,status,types,usage,comment,extattrs"
+                    "_return_fields": "ip_address,mac_address,names,network,network_view,objects,status,types,usage,comment,extattrs"
                 },
             }
             return query
@@ -378,7 +407,7 @@ def create_payload(prefix: str, view: str) -> dict:
 
         return ipaddrs
 
-    def create_network(self, prefix, comment=None):
+    def create_network(self, prefix, comment=None, network_view: Optional[str] = None):
         """Create a network.
 
         Args:
             prefix (str): IP network to create.
 
         Returns:
             (str) of reference network
 
         "network/ZG5zLm5ldHdvcmskMTkyLjE2OC4wLjAvMjMvMA:192.168.0.0/23/default"
         """
         params = {"network": prefix, "comment": comment}
+        if network_view:
+            params["network_view"] = network_view
         api_path = "network"
         response = self._request("POST", api_path, params=params)
-        logger.info(response.text)
+        logger.debug(response.text)
         return response.text
 
-    def delete_network(self, prefix):
+    def delete_network(self, prefix, network_view: Optional[str] = None):
         """Delete a network.
 
Args: @@ -408,7 +439,7 @@ def delete_network(self, prefix): Returns Response: {"deleted": "network/ZG5zLm5ldHdvcmskMTkyLjAuMi4wLzI0LzA:192.0.2.0/24/default"} """ - resource = self._get_network_ref(prefix) + resource = self._get_network_ref(prefix=prefix, network_view=network_view) if resource: self._delete(resource) @@ -416,10 +447,10 @@ def delete_network(self, prefix): else: response = {"error": f"{prefix} not found."} - logger.info(response) + logger.debug(response) return response - def update_network(self, prefix, comment=None): + def update_network(self, prefix, comment=None, network_view: Optional[str] = None): """Update a network. Args: @@ -432,7 +463,7 @@ def update_network(self, prefix, comment=None): Return Response: {"updated": "network/ZG5zLm5ldHdvcmskMTkyLjE2OC4wLjAvMjMvMA:192.168.0.0/23/default"} """ - resource = self._get_network_ref(prefix) + resource = self._get_network_ref(prefix=prefix, network_view=network_view) if resource: params = {"network": prefix, "comment": comment} @@ -440,10 +471,10 @@ def update_network(self, prefix, comment=None): response = {"updated": resource} else: response = {"error": f"error updating {prefix}"} - logger.info(response) + logger.debug(response) return response - def create_network_container(self, prefix, comment=None): + def create_network_container(self, prefix, comment=None, network_view: Optional[str] = None): """Create a network container. Args: @@ -456,12 +487,14 @@ def create_network_container(self, prefix, comment=None): "networkcontainer/ZG5zLm5ldHdvcmskMTkyLjE2OC4wLjAvMjMvMA:192.168.0.0/23/default" """ params = {"network": prefix, "comment": comment} + if network_view: + params["network_view"] = network_view api_path = "networkcontainer" response = self._request("POST", api_path, params=params) - logger.info(response.text) + logger.debug(response.text) return response.text - def delete_network_container(self, prefix): + def delete_network_container(self, prefix, network_view: Optional[str] = None): """Delete a network container. Args: @@ -473,18 +506,19 @@ def delete_network_container(self, prefix): Returns Response: {"deleted": "networkcontainer/ZG5zLm5ldHdvcmskMTkyLjAuMi4wLzI0LzA:192.0.2.0/24/default"} """ - resource = self._get_network_container_ref(prefix) + resource = self._get_network_container_ref(prefix=prefix, network_view=network_view) if resource: self._delete(resource) response = {"deleted": resource} else: - response = {"error": f"{prefix} not found."} + nv_msg = f" in network view {network_view}" if network_view else "" + response = {"error": f"{prefix}{nv_msg} not found."} - logger.info(response) + logger.debug(response) return response - def update_network_container(self, prefix, comment=None): + def update_network_container(self, prefix, comment=None, network_view: Optional[str] = None): """Update a network container. 
Args: @@ -497,18 +531,19 @@ def update_network_container(self, prefix, comment=None): Return Response: {"updated": "networkcontainer/ZG5zLm5ldHdvcmskMTkyLjE2OC4wLjAvMjMvMA:192.168.0.0/23/default"} """ - resource = self._get_network_container_ref(prefix) + resource = self._get_network_container_ref(prefix=prefix, network_view=network_view) if resource: - params = {"network": prefix, "comment": comment} + params = {"comment": comment} self._update(resource, **params) response = {"updated": resource} else: - response = {"error": f"error updating {prefix}"} - logger.info(response) + nv_msg = f" in network view {network_view}" if network_view else "" + response = {"error": f"error updating {prefix}{nv_msg}"} + logger.debug(response) return response - def create_range(self, prefix: str, start: str, end: str) -> str: + def create_range(self, prefix: str, start: str, end: str, network_view: Optional[str] = None) -> str: """Create a range. Args: @@ -523,15 +558,14 @@ def create_range(self, prefix: str, start: str, end: str) -> str: "range/ZG5zLm5ldHdvcmskMTkyLjE2OC4wLjAvMjMvMA:192.168.0.100/192.168.0.254/default" """ params = {"network": prefix, "start_addr": start, "end_addr": end} - plugin_defined_network_view = PLUGIN_CFG.get("NAUTOBOT_INFOBLOX_NETWORK_VIEW") - if plugin_defined_network_view: - params["network_view"] = plugin_defined_network_view + if network_view: + params["network_view"] = network_view api_path = "range" response = self._request("POST", api_path, params=params) - logger.info(response.text) + logger.debug(response.text) return response.text - def get_host_record_by_name(self, fqdn): + def get_host_record_by_name(self, fqdn, network_view: Optional[str] = None): """Get the host record by using FQDN. Args: @@ -560,11 +594,13 @@ def get_host_record_by_name(self, fqdn): """ url_path = "record:host" params = {"name": fqdn, "_return_as_object": 1} + if network_view: + params["network_view"] = network_view response = self._request("GET", url_path, params=params) - logger.info(response.json) + logger.debug(response.json()) return response.json().get("result") - def get_host_record_by_ip(self, ip_address): + def get_host_record_by_ip(self, ip_address, network_view: Optional[str] = None): """Get the host record by using IP Address. Args: @@ -593,11 +629,13 @@ def get_host_record_by_ip(self, ip_address): """ url_path = "record:host" params = {"ipv4addr": ip_address, "_return_as_object": 1} + if network_view: + params["network_view"] = network_view response = self._request("GET", url_path, params=params) - logger.info(response.json) + logger.debug(response.json()) return response.json().get("result") - def get_a_record_by_name(self, fqdn): + def get_a_record_by_name(self, fqdn, network_view: Optional[str] = None): """Get the A record for a FQDN. Args: @@ -618,11 +656,16 @@ def get_a_record_by_name(self, fqdn): """ url_path = "record:a" params = {"name": fqdn, "_return_as_object": 1} + # TODO: This is a bit more complicated. One network view can have multiple DNS views + # default name for a DNS view for a network view is formed by prepending "default." 
to the network view name + if network_view: + dns_view = self.get_default_dns_view_for_network_view(network_view) + params["view"] = dns_view response = self._request("GET", url_path, params=params) - logger.info(response.json) + logger.debug(response.json()) return response.json().get("result") - def get_a_record_by_ip(self, ip_address): + def get_a_record_by_ip(self, ip_address, network_view: Optional[str] = None): """Get the A record for a IP Address. Args: @@ -643,11 +686,17 @@ def get_a_record_by_ip(self, ip_address): """ url_path = "record:a" params = {"ipv4addr": ip_address, "_return_as_object": 1} + # TODO: This is a bit more complicated. One network view can have multiple DNS views + # default name for a DNS view for a network view is formed by prepending "default." to the network view name + # TODO: Would be interesting to see if we can specify network view in the lookup + if network_view: + dns_view = self.get_default_dns_view_for_network_view(network_view) + params["view"] = dns_view response = self._request("GET", url_path, params=params) - logger.info(response.json) + logger.debug(response.json()) return response.json().get("result") - def get_ptr_record_by_name(self, fqdn): + def get_ptr_record_by_name(self, fqdn, network_view: Optional[str] = None): """Get the PTR record by FQDN. Args: @@ -667,8 +716,11 @@ def get_ptr_record_by_name(self, fqdn): """ url_path = "record:ptr" params = {"ptrdname": fqdn, "_return_as_object": 1} + if network_view: + dns_view = self.get_default_dns_view_for_network_view(network_view) + params["view"] = dns_view response = self._request("GET", url_path, params=params) - logger.info(response.json) + logger.debug(response.json()) return response.json().get("result") def get_all_dns_views(self): @@ -692,12 +744,12 @@ def get_all_dns_views(self): ] """ url_path = "view" - params = {"_return_as_object": 1} + params = {"_return_fields": "is_default,name,network_view", "_return_as_object": 1} response = self._request("GET", url_path, params=params) - logger.info(response.json) + logger.debug(response.json()) return response.json().get("result") - def create_a_record(self, fqdn, ip_address): + def create_a_record(self, fqdn, ip_address, network_view: Optional[str] = None): """Create an A record for a given FQDN. Please note: This API call with work only for host records that do not have an associated a record. @@ -715,8 +767,11 @@ def create_a_record(self, fqdn, ip_address): url_path = "record:a" params = {"_return_fields": "name", "_return_as_object": 1} payload = {"name": fqdn, "ipv4addr": ip_address} + if network_view: + dns_view = self.get_default_dns_view_for_network_view(network_view) + payload["view"] = dns_view response = self._request("POST", url_path, params=params, json=payload) - logger.info(response.json) + logger.debug(response.json()) return response.json().get("result") def get_dhcp_lease(self, lease_to_check): @@ -741,7 +796,7 @@ def get_dhcp_lease(self, lease_to_check): return self.get_dhcp_lease_from_ipv4(lease_to_check) return self.get_dhcp_lease_from_hostname(lease_to_check) - def get_dhcp_lease_from_ipv4(self, ip_address): + def get_dhcp_lease_from_ipv4(self, ip_address, network_view: Optional[str] = None): """Get a DHCP lease for the IP address passed in. 
Args: @@ -766,11 +821,13 @@ def get_dhcp_lease_from_ipv4(self, ip_address): "_return_fields": "binding_state,hardware,client_hostname,fingerprint", "_return_as_object": 1, } + if network_view: + params["network_view"] = network_view response = self._request("GET", url_path, params=params) - logger.info(response.json) + logger.debug(response.json()) return response.json() - def get_dhcp_lease_from_hostname(self, hostname): + def get_dhcp_lease_from_hostname(self, hostname, network_view: Optional[str] = None): """Get a DHCP lease for the hostname passed in. Args: @@ -795,11 +852,15 @@ def get_dhcp_lease_from_hostname(self, hostname): "_return_fields": "binding_state,hardware,client_hostname,fingerprint", "_return_as_object": 1, } + if network_view: + params["network_view"] = network_view response = self._request("GET", url_path, params=params) - logger.info(response.json) + logger.debug(response.json()) return response.json() - def get_all_ranges(self, prefix: Optional[str] = None) -> dict[str, dict[str, list[dict[str, str]]]]: + def get_all_ranges( + self, prefix: Optional[str] = None, network_view: Optional[str] = None + ) -> dict[str, dict[str, list[dict[str, str]]]]: """Get all Ranges. Args: @@ -821,25 +882,24 @@ def get_all_ranges(self, prefix: Optional[str] = None) -> dict[str, dict[str, li """ url_path = "range" params = {"_return_fields": "network,network_view,start_addr,end_addr", "_max_results": 10000} - plugin_defined_network_view = PLUGIN_CFG.get("NAUTOBOT_INFOBLOX_NETWORK_VIEW") - if plugin_defined_network_view: - params["network_view"] = plugin_defined_network_view + if network_view: + params["network_view"] = network_view if prefix: - params["network"]: prefix + params["network"] = prefix try: response = self._request("GET", url_path, params=params) except HTTPError as err: logger.info(err.response.text) return {} json_response = response.json() - logger.info(json_response) + logger.debug(json_response()) data = defaultdict(lambda: defaultdict(list)) for prefix_range in json_response: str_range = f"{prefix_range['start_addr']}-{prefix_range['end_addr']}" data[prefix_range["network_view"]][prefix_range["network"]].append(str_range) return data - def get_all_subnets(self, prefix: str = None, ipv6: bool = False): + def get_all_subnets(self, prefix: str = None, ipv6: bool = False, network_view: Optional[str] = None): """Get all Subnets. Args: @@ -879,8 +939,8 @@ def get_all_subnets(self, prefix: str = None, ipv6: bool = False): "_return_fields": "network,network_view,comment,extattrs,rir_organization,rir,vlans", "_max_results": 10000, } - if PLUGIN_CFG.get("NAUTOBOT_INFOBLOX_NETWORK_VIEW"): - params.update({"network_view": PLUGIN_CFG["NAUTOBOT_INFOBLOX_NETWORK_VIEW"]}) + if network_view: + params.update({"network_view": network_view}) if prefix: params.update({"network": prefix}) try: @@ -889,9 +949,10 @@ def get_all_subnets(self, prefix: str = None, ipv6: bool = False): logger.info(err.response.text) return [] json_response = response.json() - logger.info(json_response) + logger.debug(json_response()) + # TODO: What does the below code do? We don't return any of this. 
@progala if not ipv6: - ranges = self.get_all_ranges(prefix=prefix) + ranges = self.get_all_ranges(prefix=prefix, network_view=network_view) for returned_prefix in json_response: network_view_ranges = ranges.get(returned_prefix["network_view"], {}) prefix_ranges = network_view_ranges.get(returned_prefix["network"]) @@ -901,7 +962,7 @@ def get_all_subnets(self, prefix: str = None, ipv6: bool = False): logger.info("Support for DHCP Ranges is not currently supported for IPv6 Networks.") return json_response - def get_authoritative_zone(self): + def get_authoritative_zone(self, network_view: Optional[str] = None): """Get authoritative zone to check if fqdn exists. Returns: @@ -923,11 +984,14 @@ def get_authoritative_zone(self): """ url_path = "zone_auth" params = {"_return_as_object": 1} + if network_view: + dns_view = self.get_default_dns_view_for_network_view(network_view) + params["view"] = dns_view response = self._request("GET", url_path, params=params) - logger.info(response.json()) + logger.debug(response.json()) return response.json().get("result") - def _find_network_reference(self, network): + def _find_network_reference(self, network, network_view: Optional[str] = None): """Find the reference for the given network. Returns: @@ -944,11 +1008,13 @@ def _find_network_reference(self, network): """ url_path = "network" params = {"network": network} + if network_view: + params["network_view"] = network_view response = self._request("GET", url_path, params=params) - logger.info(response.json()) + logger.debug(response.json()) return response.json() - def find_next_available_ip(self, network): + def find_next_available_ip(self, network, network_view: Optional[str] = None): """Find the next available ip address for a given network. Returns: @@ -964,8 +1030,9 @@ def find_next_available_ip(self, network): next_ip_avail = "" # Find the Network reference id try: - network_ref_id = self._find_network_reference(network) + network_ref_id = self._find_network_reference(network=network, network_view=network_view) except Exception as err: # pylint: disable=broad-except + # TODO: Add network-view to the error @progala logger.warning("Network reference not found for %s: %s", network, err) return next_ip_avail @@ -975,12 +1042,12 @@ def find_next_available_ip(self, network): params = {"_function": "next_available_ip"} payload = {"num": 1} response = self._request("POST", url_path, params=params, json=payload) - logger.info(response.json()) + logger.debug(response.json()) next_ip_avail = response.json().get("ips")[0] return next_ip_avail - def reserve_fixed_address(self, network, mac_address): + def reserve_fixed_address(self, network, mac_address, network_view: Optional[str] = None): """Reserve the next available ip address for a given network range. 
Returns: @@ -990,17 +1057,19 @@ def reserve_fixed_address(self, network, mac_address): "10.220.0.1" """ # Get the next available IP Address for this network - ip_address = self.find_next_available_ip(network) + ip_address = self.find_next_available_ip(network=network, network_view=network_view) if ip_address: url_path = "fixedaddress" params = {"_return_fields": "ipv4addr", "_return_as_object": 1} payload = {"ipv4addr": ip_address, "mac": mac_address} + if network_view: + payload["network_view"] = network_view response = self._request("POST", url_path, params=params, json=payload) - logger.info(response.json()) + logger.debug(response.json()) return response.json().get("result").get("ipv4addr") return False - def create_fixed_address(self, ip_address, mac_address): + def create_fixed_address(self, ip_address, mac_address, network_view: Optional[str] = None): """Create a fixed ip address within Infoblox. Returns: @@ -1012,11 +1081,13 @@ def create_fixed_address(self, ip_address, mac_address): url_path = "fixedaddress" params = {"_return_fields": "ipv4addr", "_return_as_object": 1} payload = {"ipv4addr": ip_address, "mac": mac_address} + if network_view: + payload["network_view"] = network_view response = self._request("POST", url_path, params=params, json=payload) - logger.info(response.json()) + logger.debug(response.json()) return response.json().get("result").get("ipv4addr") - def create_host_record(self, fqdn, ip_address): + def create_host_record(self, fqdn, ip_address, network_view: Optional[str] = None): """Create a host record for a given FQDN. Please note: This API call with work only for host records that do not have an associated a record. @@ -1035,27 +1106,30 @@ def create_host_record(self, fqdn, ip_address): url_path = "record:host" params = {"_return_fields": "name", "_return_as_object": 1} payload = {"name": fqdn, "configure_for_dns": False, "ipv4addrs": [{"ipv4addr": ip_address}]} + if network_view: + payload["network_view"] = network_view try: response = self._request("POST", url_path, params=params, json=payload) except HTTPError as err: logger.info("Host record error: %s", err.response.text) return [] - logger.info("Infoblox host record created: %s", response.json()) + logger.debug("Infoblox host record created: %s", response.json()) return response.json().get("result") - def delete_host_record(self, ip_address): + def delete_host_record(self, ip_address, network_view: Optional[str] = None): """Delete provided IP Address from Infoblox.""" - resource = self.get_host_record_by_ip(ip_address) + resource = self.get_host_record_by_ip(ip_address=ip_address, network_view=network_view) + # TODO: Add network view to messages @progala if resource: ref = resource[0]["_ref"] self._delete(ref) response = {"deleted": ip_address} else: response = {"error": f"Did not find {ip_address}"} - logger.info(response) + logger.debug(response) return response - def create_ptr_record(self, fqdn, ip_address): + def create_ptr_record(self, fqdn, ip_address, network_view: Optional[str] = None): """Create an PTR record for a given FQDN. 
Args: @@ -1074,12 +1148,16 @@ def create_ptr_record(self, fqdn, ip_address): } """ url_path = "record:ptr" - params = {"_return_fields": "name,ptrdname,ipv4addr", "_return_as_object": 1} + params = {"_return_fields": "name,ptrdname,ipv4addr,view", "_return_as_object": 1} reverse_host = str(reversename.from_address(ip_address))[ 0:-1 ] # infoblox does not accept the top most domain '.', so we strip it - payload = {"name": reverse_host, "ptrdname": fqdn, "ipv4addr": ip_address} + payload = {"name": reverse_host, "ptrdname": fqdn} # , "ipv4addr": ip_address} + if network_view: + dns_view = self.get_default_dns_view_for_network_view(network_view) + payload["view"] = dns_view response = self._request("POST", url_path, params=params, json=payload) + # TODO: Add network view/dns view to the message @progala logger.info("Infoblox PTR record created: %s", response.json()) return response.json().get("result") @@ -1114,7 +1192,7 @@ def search_ipv4_address(self, ip_address): url_path = "search" params = {"address": ip_address, "_return_as_object": 1} response = self._request("GET", url_path, params=params) - logger.info(response.json()) + logger.debug(response.json()) return response.json().get("result") def get_vlan_view(self, name="Nautobot"): @@ -1139,7 +1217,7 @@ def get_vlan_view(self, name="Nautobot"): url_path = "vlanview" params = {"name": name} response = self._request("GET", path=url_path, params=params) - logger.info(response.json()) + logger.debug(response.json()) return response.json() def create_vlan_view(self, name, start_vid=1, end_vid=4094): @@ -1159,7 +1237,7 @@ def create_vlan_view(self, name, start_vid=1, end_vid=4094): url_path = "vlanview" params = {"name": name, "start_vlan_id": start_vid, "end_vlan_id": end_vid} response = self._request("POST", path=url_path, params=params) - logger.info(response.json()) + logger.debug(response.json()) return response.json() def get_vlanviews(self): @@ -1189,7 +1267,7 @@ def get_vlanviews(self): url_path = "vlanview" params = {"_return_fields": "name,comment,start_vlan_id,end_vlan_id,extattrs"} response = self._request("GET", url_path, params=params) - logger.info(response.json()) + logger.debug(response.json()) return response.json() def get_vlans(self): @@ -1238,8 +1316,11 @@ def get_vlans(self): ] ) response = self._request("POST", url_path, data=payload) - logger.info(response.json()[0]) - return response.json()[0] + logger.debug(response.json()) + if len(response.json()): + return response.json()[0] + else: + return [] def create_vlan(self, vlan_id, vlan_name, vlan_view): """Create a VLAN in Infoblox. @@ -1266,7 +1347,7 @@ def create_vlan(self, vlan_id, vlan_name, vlan_view): params = {} payload = {"parent": parent, "id": vlan_id, "name": vlan_name} response = self._request("POST", url_path, params=params, json=payload) - logger.info(response.json()) + logger.debug(response.json()) return response.json() @staticmethod @@ -1285,7 +1366,7 @@ def get_ipaddr_type(ip_record: dict) -> str: return "slaac" return "host" - def _find_resource(self, resource, **params): + def _find_matching_resources(self, resource, **params): """Find the resource for given parameters. 
Returns: @@ -1295,17 +1376,19 @@ def _find_resource(self, resource, **params): _ref: fixedaddress/ZG5zLmZpeGVkX2FkZHJlc3MkMTAuMjIwLjAuMy4wLi4:10.220.0.3/default """ response = self._request("GET", resource, params=params) - logger.info(response.json()) - for _resource in response.json(): - return _resource.get("_ref") + logger.debug(response.json()) return response.json() # TODO: See if we should accept params dictionary and extended to both host record and fixed address - def update_ipaddress(self, ip_address, **data): # pylint: disable=inconsistent-return-statements - """Update a Network object with a given prefix. + # TODO: This doesn't work very well at all currently @progala + # Perhaps make multiple searches, or go through types returned by the search + def update_ipaddress( + self, ip_address, network_view: Optional[str] = None, **data + ): # pylint: disable=inconsistent-return-statements + """Update a IP Address object with a given ip address. Args: - prefix (str): Valid IP prefix + ip_address (str): Valid IP address data (dict): keyword args used to update the object e.g. comment="updateme" Returns: @@ -1317,33 +1400,70 @@ def update_ipaddress(self, ip_address, **data): # pylint: disable=inconsistent- "ipv4addr": "10.220.0.3" } """ - resource = self._find_resource("search", address=ip_address) - if not resource: + # resources = self._find_matching_resources("search", search_string=ip_address, objtype="fixedaddress") + # resources.extend(self._find_matching_resources("search", search_string=ip_address, objtype="record:host")) + resources = self._find_matching_resources("search", address=ip_address) + if not resources: + return + found_ipv4_ref = None + # We can get multiple resources of varying types. The name of resource is embedded in the `_ref` attr + resource_types = ["fixedaddress"] + if network_view: + for resource in resources: + ref = resource.get("_ref") + if ref.split("/")[0] not in resource_types: + continue + if resource.get("network_view") != network_view: + continue + if resource.get("ipv4addr") != ip_address: + continue + found_ipv4_ref = ref + break + else: + for resource in resources: + ref = resource.get("_ref") + if ref.split("/")[0] not in resource_types: + continue + if resource.get("ipv4addr") != ip_address: + continue + found_ipv4_ref = ref + break + + if not found_ipv4_ref: return # params = {"_return_fields": "ipv4addr", "_return_as_object": 1} params = {} try: - logger.info(data) - response = self._request("PUT", path=resource, params=params, json=data["data"]) + logger.debug(data) + response = self._request("PUT", path=found_ipv4_ref, params=params, json=data) except HTTPError as err: - logger.info("Resource: %s", resource) + logger.info("Resource: %s", found_ipv4_ref) logger.info("Could not update IP address: %s", err.response.text) return logger.info("Infoblox IP Address updated: %s", response.json()) return response.json() - def get_tree_from_container(self, root_container: str) -> list: + def get_tree_from_container(self, root_container: str, network_view: Optional[str] = None) -> list: """Returns the list of all child containers from a given root container.""" flattened_tree = [] stack = [] root_containers = self.get_network_containers(prefix=root_container) + if network_view: + root_containers = self.get_network_containers(prefix=root_container, network_view=network_view) + else: + root_containers = self.get_network_containers(prefix=root_container) if root_containers: stack = [root_containers[0]] + get_child_network_containers_kwargs = {} + if 
network_view: + get_child_network_containers_kwargs["network_view"] = network_view + while stack: current_node = stack.pop() + get_child_network_containers_kwargs.update({"prefix": current_node["network"]}) flattened_tree.append(current_node) - children = self.get_child_network_containers(prefix=current_node["network"]) + children = self.get_child_network_containers(**get_child_network_containers_kwargs) stack.extend(children) return flattened_tree @@ -1359,7 +1479,7 @@ def remove_duplicates(self, network_list: list) -> list: return new_list - def get_network_containers(self, prefix: str = "", ipv6: bool = False): + def get_network_containers(self, prefix: str = "", ipv6: bool = False, network_view: Optional[str] = None): """Get all Network Containers. Args: @@ -1391,19 +1511,19 @@ def get_network_containers(self, prefix: str = "", ipv6: bool = False): "_return_fields": "network,comment,network_view,extattrs,rir_organization,rir", "_max_results": 100000, } - if PLUGIN_CFG.get("NAUTOBOT_INFOBLOX_NETWORK_VIEW"): - params.update({"network_view": PLUGIN_CFG["NAUTOBOT_INFOBLOX_NETWORK_VIEW"]}) + if network_view: + params.update({"network_view": network_view}) if prefix: params.update({"network": prefix}) response = self._request("GET", url_path, params=params) response = response.json() - logger.info(response) + logger.debug(response) results = response.get("result", []) for res in results: res.update({"status": "container"}) return results - def get_child_network_containers(self, prefix: str): + def get_child_network_containers(self, prefix: str, network_view: Optional[str] = None): """Get all Child Network Containers for Container. Returns: @@ -1435,18 +1555,18 @@ def get_child_network_containers(self, prefix: str): "_return_fields": "network,comment,network_view,extattrs,rir_organization,rir", "_max_results": 100000, } - if PLUGIN_CFG.get("NAUTOBOT_INFOBLOX_NETWORK_VIEW"): - params.update({"network_view": PLUGIN_CFG["NAUTOBOT_INFOBLOX_NETWORK_VIEW"]}) + if network_view: + params.update({"network_view": network_view}) params.update({"network_container": prefix}) response = self._request("GET", url_path, params=params) response = response.json() - logger.info(response) + logger.debug(response) results = response.get("result", []) for res in results: res.update({"status": "container"}) return results - def get_child_subnets_from_container(self, prefix: str): + def get_child_subnets_from_container(self, prefix: str, network_view: Optional[str] = None): """Get child subnets from container. Args: @@ -1483,8 +1603,8 @@ def get_child_subnets_from_container(self, prefix: str): "_return_fields": "network,network_view,comment,extattrs,rir_organization,rir,vlans", "_max_results": 10000, } - if PLUGIN_CFG.get("NAUTOBOT_INFOBLOX_NETWORK_VIEW"): - params.update({"network_view": PLUGIN_CFG["NAUTOBOT_INFOBLOX_NETWORK_VIEW"]}) + if network_view: + params.update({"network_view": network_view}) params.update({"network_container": prefix}) try: @@ -1493,5 +1613,113 @@ def get_child_subnets_from_container(self, prefix: str): logger.info(err.response.text) return [] response = response.json() - logger.info(response) + logger.debug(response) return response.get("result") + + def get_network_views(self): + """Get all network views. 
+ + Returns: + (list) of record dicts + + Return Response: + [ + { + "_ref": "networkview/ZG5zLm5ldHdvcmtfdmlldyQw:default/true", + "associated_dns_views": [ + "default" + ], + "extattrs": { + + }, + "is_default": true, + "name": "default" + }, + { + "_ref": "networkview/ZG5zLm5ldHdvcmtfdmlldyQx:prod/false", + "associated_dns_views": [ + "default.prod" + ], + "extattrs": { + + }, + "is_default": false, + "name": "prod" + }, + { + "_ref": "networkview/ZG5zLm5ldHdvcmtfdmlldyQy:dev/false", + "associated_dns_views": [ + "default.dev" + ], + "extattrs": { + + }, + "is_default": false, + "name": "dev" + } + ] + """ + url_path = "networkview" + params = { + "_return_fields": "name,associated_dns_views,extattrs,comment,is_default", + } + try: + response = self._request("GET", url_path, params=params) + except HTTPError as err: + logger.info(err.response.text) + return [] + logger.debug(response.json()) + return response.json() + + def get_network_view(self, name: str): + """Get network view object for given name. + + Args: + name (str): Name of the network view - 'dev' + + Returns: + (dict) record dict + + Return Response: + [ + { + "_ref": "networkview/ZG5zLm5ldHdvcmtfdmlldyQy:dev/false", + "associated_dns_views": [ + "default.dev" + ], + "extattrs": { + + }, + "is_default": false, + "name": "dev" + } + ] + """ + url_path = "networkview" + params = { + "name": name, + "_return_fields": "name,associated_dns_views,extattrs,comment,is_default", + } + try: + response = self._request("GET", path=url_path, params=params) + except HTTPError as err: + logger.info(err.response.text) + return [] + logger.debug(response.json()) + return response.json() + + def get_default_dns_view_for_network_view(self, network_view: str): + """Get default (first on the list) DNS view for given network view. + + Args: + network_view (str): Name of the network view - 'dev' + + Returns: + (str) name of the default dns view + """ + _network_view = self.get_network_view(network_view) + logger.info(_network_view) + if _network_view: + return _network_view[0]["associated_dns_views"][0] + else: + return None From 8b1bbaf60d01ee7fc38de4ba30d6130f809726a1 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 29 Apr 2024 20:17:03 +0100 Subject: [PATCH 030/229] Add NetworkView to Namespace remapping utility. --- .../integrations/infoblox/utils/diffsync.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/nautobot_ssot/integrations/infoblox/utils/diffsync.py b/nautobot_ssot/integrations/infoblox/utils/diffsync.py index df9829019..2615e1d23 100644 --- a/nautobot_ssot/integrations/infoblox/utils/diffsync.py +++ b/nautobot_ssot/integrations/infoblox/utils/diffsync.py @@ -97,3 +97,20 @@ def get_default_custom_fields(cf_contenttype: ContentType) -> dict: if customfield.key not in default_cfs: default_cfs[customfield.key] = None return default_cfs + + +def map_network_view_to_namespace(network_view: str) -> str: + """Remaps Infoblox Network View name to Nautobot Namespace name. + + This matters most for mapping default "default" Network View to default Namespace "Global". + + Args: + network_view (str): Infoblox Network View name + + Returns: + (str) corresponding Nautobot Namespace name + """ + network_view_to_namespace = { + "default": "Global", + } + return network_view_to_namespace.get(network_view, network_view) From 6018ae8c6d544d13476b31ea5bab7e2a02080ebf Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 29 Apr 2024 20:17:18 +0100 Subject: [PATCH 031/229] Add Namespace to job. 
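[Editor's note] The remapping utility added in PATCH 030 above is a plain dictionary lookup with passthrough, so only the default Network View is renamed. A usage sketch:

    from nautobot_ssot.integrations.infoblox.utils.diffsync import map_network_view_to_namespace

    map_network_view_to_namespace("default")  # -> "Global"
    map_network_view_to_namespace("dev")      # -> "dev" (any other view keeps its own name)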
--- nautobot_ssot/integrations/infoblox/jobs.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nautobot_ssot/integrations/infoblox/jobs.py b/nautobot_ssot/integrations/infoblox/jobs.py index 64578d187..89ae8b2ab 100644 --- a/nautobot_ssot/integrations/infoblox/jobs.py +++ b/nautobot_ssot/integrations/infoblox/jobs.py @@ -36,6 +36,7 @@ class Meta: # pylint: disable=too-few-public-methods def data_mappings(cls): """Show mapping of models between Infoblox and Nautobot.""" return ( + DataMapping("network_view", None, "Namespace", reverse("ipam:namespace_list")), DataMapping("network", None, "Prefix", reverse("ipam:prefix_list")), DataMapping("ipaddress", None, "IP Address", reverse("ipam:ipaddress_list")), DataMapping("vlan", None, "VLAN", reverse("ipam:vlan_list")), From 93194648eaf916f37a287eccc33be348c4e47ad8 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Tue, 30 Apr 2024 18:30:41 +0100 Subject: [PATCH 032/229] Add Namespace models --- .../integrations/infoblox/diffsync/models/__init__.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py b/nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py index 15cdf49df..9dd609bf6 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py @@ -1,14 +1,15 @@ """Initialize models for Nautobot and Infoblox.""" - -from .nautobot import NautobotNetwork, NautobotIPAddress, NautobotVlanGroup, NautobotVlan -from .infoblox import InfobloxNetwork, InfobloxIPAddress, InfobloxVLANView, InfobloxVLAN +from .nautobot import NautobotNamespace, NautobotNetwork, NautobotIPAddress, NautobotVlanGroup, NautobotVlan +from .infoblox import InfobloxNamespace, InfobloxNetwork, InfobloxIPAddress, InfobloxVLANView, InfobloxVLAN __all__ = [ + "NautobotNamespace", "NautobotNetwork", "NautobotIPAddress", "NautobotVlanGroup", "NautobotVlan", + "InfobloxNamespace", "InfobloxNetwork", "InfobloxIPAddress", "InfobloxVLANView", From 02b11a7ddcefb95708caacfdbdeaf224dea67194 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 9 May 2024 10:20:35 +0100 Subject: [PATCH 033/229] Add config view to the app config. --- nautobot_ssot/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nautobot_ssot/__init__.py b/nautobot_ssot/__init__.py index 7f9afbb22..b1d554c5a 100644 --- a/nautobot_ssot/__init__.py +++ b/nautobot_ssot/__init__.py @@ -120,6 +120,7 @@ class NautobotSSOTAppConfig(NautobotAppConfig): "servicenow_username": "", } caching_config = {} + config_view_name = "plugins:nautobot_ssot:config" def ready(self): """Trigger callback when database is ready.""" From 2401be4190cf7004708a70c16b9b3436124510df Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 9 May 2024 10:22:07 +0100 Subject: [PATCH 034/229] Add non-db config model to enable permissions. 
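[Editor's note] The `config_view_name` attribute added in PATCH 033 points Nautobot's per-app "Configuration" link at a custom view instead of the auto-generated config form; its value must be a reversible URL name. A sketch of the resolution, assuming the URL is registered as in PATCH 036 later in this series (the resulting path depends on the app's base URL):

    from django.urls import reverse

    reverse("plugins:nautobot_ssot:config")  # e.g. "/plugins/ssot/config/"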
--- nautobot_ssot/models.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/nautobot_ssot/models.py b/nautobot_ssot/models.py index 711e32a71..073b58f46 100644 --- a/nautobot_ssot/models.py +++ b/nautobot_ssot/models.py @@ -28,12 +28,14 @@ from django.utils.formats import date_format from django.utils.timezone import now + from nautobot.core.models import BaseModel from nautobot.extras.choices import JobResultStatusChoices from nautobot.extras.models import JobResult from nautobot.extras.utils import extras_features from nautobot_ssot.integrations.servicenow.models import SSOTServiceNowConfig +from nautobot_ssot.integrations.infoblox.models import SSOTInfobloxConfig from .choices import SyncLogEntryActionChoices, SyncLogEntryStatusChoices @@ -206,7 +208,14 @@ def get_status_class(self): }.get(self.status) +class SSOTConfig(models.Model): + class Meta: + managed = False + default_permissions = ("view",) + + __all__ = ( + "SSOTInfobloxConfig", "SSOTServiceNowConfig", "Sync", "SyncLogEntry", From fa4bd2235acee16dbe07d14557f2adca78c3c67e Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 9 May 2024 10:29:26 +0100 Subject: [PATCH 035/229] Add SSOT config view. Sort imports. --- nautobot_ssot/views.py | 27 +++++++++++++++++++++------ 1 file changed, 21 insertions(+), 6 deletions(-) diff --git a/nautobot_ssot/views.py b/nautobot_ssot/views.py index 5ad225fee..4924be50e 100644 --- a/nautobot_ssot/views.py +++ b/nautobot_ssot/views.py @@ -3,20 +3,22 @@ import pprint from django.http import Http404 -from django.shortcuts import get_object_or_404 - +from django.shortcuts import get_object_or_404, render +from django.views import View as DjangoView from django_tables2 import RequestConfig - -from nautobot.extras.models import Job as JobModel from nautobot.core.views.generic import BulkDeleteView, ObjectDeleteView, ObjectListView, ObjectView +from nautobot.core.views.mixins import ContentTypePermissionRequiredMixin from nautobot.core.views.paginator import EnhancedPaginator +from nautobot.extras.models import Job as JobModel + +from nautobot_ssot.integrations import utils from .filters import SyncFilterSet, SyncLogEntryFilterSet from .forms import SyncFilterForm, SyncLogEntryFilterForm -from .jobs.base import DataSource, DataTarget from .jobs import get_data_jobs +from .jobs.base import DataSource, DataTarget from .models import Sync, SyncLogEntry -from .tables import DashboardTable, SyncTable, SyncTableSingleSourceOrTarget, SyncLogEntryTable +from .tables import DashboardTable, SyncLogEntryTable, SyncTable, SyncTableSingleSourceOrTarget class DashboardView(ObjectListView): @@ -182,3 +184,16 @@ class SyncLogEntryListView(ObjectListView): table = SyncLogEntryTable action_buttons = [] template_name = "nautobot_ssot/synclogentry_list.html" + + +class SSOTConfigView(ContentTypePermissionRequiredMixin, DjangoView): + """View with the SSOT integration configs.""" + + def get_required_permission(self): + """Permissions required for the view.""" + return "nautobot_ssot.view_ssotconfig" + + def get(self, request): + """Return table with links to configuration pages for enabled integrations.""" + enabled_integrations = list(utils.each_enabled_integration()) + return render(request, "nautobot_ssot/ssot_configs.html", {"enabled_integrations": enabled_integrations}) From c4ffef70aaaf9018590222af22a749d49bfe5d34 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 9 May 2024 10:29:48 +0100 Subject: [PATCH 036/229] Add config view to urls. 
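With the view and permission in place, the config page is reachable by its route name (`plugins:nautobot_ssot:config`, matching the `config_view_name` set earlier) and gated by `nautobot_ssot.view_ssotconfig`. A rough sketch of how that gate could be verified; the test scaffolding here is an assumption, not code from this series:

    from django.contrib.auth import get_user_model
    from django.test import TestCase
    from django.urls import reverse

    class SSOTConfigViewPermissionTest(TestCase):
        def test_requires_view_permission(self):
            user = get_user_model().objects.create_user(username="viewer", password="secret")
            self.client.force_login(user)
            # Without nautobot_ssot.view_ssotconfig the mixin should deny access.
            response = self.client.get(reverse("plugins:nautobot_ssot:config"))
            self.assertEqual(response.status_code, 403)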
--- nautobot_ssot/urls.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nautobot_ssot/urls.py b/nautobot_ssot/urls.py index f7f9025ce..ee40bc5b6 100644 --- a/nautobot_ssot/urls.py +++ b/nautobot_ssot/urls.py @@ -16,6 +16,7 @@ path("history//jobresult/", views.SyncJobResultView.as_view(), name="sync_jobresult"), path("history//logs/", views.SyncLogEntriesView.as_view(), name="sync_logentries"), path("logs/", views.SyncLogEntryListView.as_view(), name="synclogentry_list"), + path("config/", views.SSOTConfigView.as_view(), name="config"), ] From de80cc381994c9673fc44129c559951fbe56d126 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 9 May 2024 10:31:36 +0100 Subject: [PATCH 037/229] Add API urls. --- nautobot_ssot/api/urls.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 nautobot_ssot/api/urls.py diff --git a/nautobot_ssot/api/urls.py b/nautobot_ssot/api/urls.py new file mode 100644 index 000000000..b2d4bb6e7 --- /dev/null +++ b/nautobot_ssot/api/urls.py @@ -0,0 +1,13 @@ +from nautobot_ssot.integrations.utils import each_enabled_integration_module + +app_name = "ssot" # pylint: disable=invalid-name + +urlpatterns = [] + + +def _add_integrations(): + for module in each_enabled_integration_module("api.urls"): + urlpatterns.extend(module.urlpatterns) + + +_add_integrations() From 73e0cc090ae5c5aa4bff7a209fb409c40f04fc19 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Fri, 17 May 2024 20:23:45 +0100 Subject: [PATCH 038/229] Add docstring. --- nautobot_ssot/api/urls.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nautobot_ssot/api/urls.py b/nautobot_ssot/api/urls.py index b2d4bb6e7..3fd743efa 100644 --- a/nautobot_ssot/api/urls.py +++ b/nautobot_ssot/api/urls.py @@ -1,3 +1,5 @@ +"""Django urlpatterns declaration for nautobot_ssot API.""" + from nautobot_ssot.integrations.utils import each_enabled_integration_module app_name = "ssot" # pylint: disable=invalid-name From 0f59c7d18fe943f6f51ad2e03a6749d347ec2c89 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Fri, 17 May 2024 20:25:17 +0100 Subject: [PATCH 039/229] - Load host/a/ptr records related to IPs. - Add custom exception raised when loading fails. - Fix prefix/ip address filtering bug. - Get config from the SSOTInfobloxConfig object. - Add excluded attributes logic. 
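The API URL module above is empty by itself; routes appear only as integrations are enabled. Conceptually the helper it relies on does something like the following (a simplified sketch; the real implementation lives in `nautobot_ssot.integrations.utils`, and the `enabled_integrations()` helper named here is an assumption):

    import importlib

    def each_enabled_integration_module(module_path):
        """Yield the given submodule (e.g. "api.urls") of every enabled integration."""
        for name in enabled_integrations():  # assumed to yield names like "infoblox"
            try:
                yield importlib.import_module(f"nautobot_ssot.integrations.{name}.{module_path}")
            except ModuleNotFoundError:
                # Integrations that don't ship this module are simply skipped.
                continue

Each yielded module's `urlpatterns` is appended to the package-level list, so disabling an integration removes its API routes without further changes.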
--- .../infoblox/diffsync/adapters/infoblox.py | 193 ++++++++---------- 1 file changed, 89 insertions(+), 104 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py index 36e4c599a..84d9c7c29 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py @@ -3,12 +3,13 @@ import re from typing import Optional +import requests + from diffsync import DiffSync from diffsync.enum import DiffSyncFlags from diffsync.exceptions import ObjectAlreadyExists from nautobot.extras.plugins.exceptions import PluginImproperlyConfigured -from nautobot_ssot.integrations.infoblox.constant import PLUGIN_CFG from nautobot_ssot.integrations.infoblox.diffsync.models.infoblox import ( InfobloxIPAddress, InfobloxNamespace, @@ -24,6 +25,10 @@ ) +class AdapterLoadException(Exception): + """Raised when there's an error while loading data.""" + + class InfobloxAdapter(DiffSync): """DiffSync adapter using requests to communicate to Infoblox server.""" @@ -35,18 +40,21 @@ class InfobloxAdapter(DiffSync): top_level = ["namespace", "vlangroup", "vlan", "prefix", "ipaddress"] - def __init__(self, *args, job=None, sync=None, conn, **kwargs): + def __init__(self, *args, job=None, sync=None, conn, config, **kwargs): """Initialize Infoblox. Args: job (object, optional): Infoblox job. Defaults to None. sync (object, optional): Infoblox DiffSync. Defaults to None. conn (object): InfobloxAPI connection. + config (object): Infoblox config object. """ super().__init__(*args, **kwargs) self.job = job self.sync = sync self.conn = conn + self.config = config + self.excluded_attrs = config.cf_fields_ignore.get("extensible_attributes", []) self.subnets = [] if self.conn in [None, False]: @@ -55,19 +63,28 @@ def __init__(self, *args, job=None, sync=None, conn, **kwargs): ) raise PluginImproperlyConfigured - def load_network_views(self, sync_filters): - """Load Namespace DiffSync model.""" + def load_network_views(self, sync_filters: dict): + """Load Namespace DiffSync model. 
+ + Args: + sync_filter (dict): Sync filter containing sync rules + """ network_view_filters = {sf["network_view"] for sf in sync_filters if "network_view" in sf} - networkviews = self.conn.get_network_views() - default_ext_attrs = get_default_ext_attrs(review_list=networkviews) - # TODO: Remove after development is done @progala - self.job.logger.info(f"NVFilters: {network_view_filters}, NetworkViews: {networkviews}") + try: + networkviews = self.conn.get_network_views() + except requests.exceptions.HTTPError as err: + self.job.logger.error(f"Error while loading network views: {str(err)}") + raise AdapterLoadException(str(err)) from err + + default_ext_attrs = get_default_ext_attrs(review_list=networkviews, excluded_attrs=self.excluded_attrs) for _nv in networkviews: # Do not load Network Views not present in the sync filters if _nv["name"] not in network_view_filters: continue - namespace_name = map_network_view_to_namespace(_nv["name"]) - networkview_ext_attrs = get_ext_attr_dict(extattrs=_nv.get("extattrs", {})) + namespace_name = map_network_view_to_namespace(value=_nv["name"], direction="nv_to_ns") + networkview_ext_attrs = get_ext_attr_dict( + extattrs=_nv.get("extattrs", {}), excluded_attrs=self.excluded_attrs + ) new_namespace = self.namespace( name=namespace_name, ext_attrs={**default_ext_attrs, **networkview_ext_attrs}, @@ -86,11 +103,8 @@ def _load_prefixes_filtered(self, sync_filter: dict, ip_version: str = "ipv4"): """ containers = [] subnets = [] - network_view = None - if "network_view" in sync_filter: - network_view = sync_filter["network_view"] - prefix_filter_attr = f"prefixes_{ip_version}" + network_view = sync_filter["network_view"] for prefix in sync_filter[prefix_filter_attr]: tree = self.conn.get_tree_from_container(root_container=prefix, network_view=network_view) @@ -105,11 +119,6 @@ def _load_prefixes_filtered(self, sync_filter: dict, ip_version: str = "ipv4"): else: subnets.extend(self.conn.get_all_subnets(prefix=prefix, network_view=network_view)) - # We need to remove duplicate prefixes if Network View support is not enabled - if not network_view: - containers = self.conn.remove_duplicates(containers) - subnets = self.conn.remove_duplicates(subnets) - return containers, subnets def _load_all_prefixes_filtered(self, sync_filters: list, include_ipv4: bool, include_ipv6: bool): @@ -136,7 +145,7 @@ def _load_all_prefixes_filtered(self, sync_filters: list, include_ipv4: bool, in containers, subnets = self._load_prefixes_filtered(sync_filter=sync_filter, ip_version="ipv6") all_subnets.extend(subnets) all_containers.extend(containers) - # Mimic default behavior of `infoblox_network_view` setting + # Load all prefixes from a network view if there are no prefix filters if "network_view" in sync_filter and not (pfx_filter_ipv4 or pfx_filter_ipv6): network_view = sync_filter["network_view"] if include_ipv4: @@ -148,61 +157,24 @@ def _load_all_prefixes_filtered(self, sync_filters: list, include_ipv4: bool, in return all_containers, all_subnets - def _load_all_prefixes_unfiltered(self, include_ipv4: bool, include_ipv6: bool): - """Loads all prefixes from Infoblox. Removes duplicates, if same prefix is found in different network views. 
- - Args: - include_ipv4: Whether to include IPv4 prefixes - include_ipv6: Whether to include IPv6 prefixes - - Returns: - (tuple): Tuple consisting of list of container prefixes and a list of subnet prefixes - """ - containers = [] - subnets = [] - if include_ipv4: - containers.extend(self.conn.get_network_containers()) - subnets.extend(self.conn.get_all_subnets()) - if include_ipv6: - containers.extend(self.conn.get_network_containers(ipv6=True)) - subnets.extend(self.conn.get_all_subnets(ipv6=True)) - - containers = self.conn.remove_duplicates(containers) - subnets = self.conn.remove_duplicates(subnets) - - return containers, subnets - def load_prefixes(self, include_ipv4: bool, include_ipv6: bool, sync_filters: Optional[list] = None): """Load InfobloxNetwork DiffSync model.""" - # TODO: Need to align it with the new filter configuration, @progala - legacy_sync_filter = {} - if PLUGIN_CFG["NAUTOBOT_INFOBLOX_NETWORK_VIEW"]: - legacy_sync_filter["network_view"] = PLUGIN_CFG["NAUTOBOT_INFOBLOX_NETWORK_VIEW"] - if PLUGIN_CFG["infoblox_import_subnets"]: - legacy_sync_filter["prefixes_ipv4"] = PLUGIN_CFG["infoblox_import_subnets"] - # TODO: Validate there's no overlap between legacy_sync_filters and sync_filter - # Alternatively, refuse to accept sync_filters if old flags are in place @progala - sync_filters = PLUGIN_CFG["infoblox_sync_filters"] - # TODO: Remove after development is done @progala - self.job.logger.info(f"sync_filters: {sync_filters}") - - if not sync_filters: - containers, subnets = self._load_all_prefixes_unfiltered( - include_ipv4=include_ipv4, include_ipv6=include_ipv6 - ) - elif sync_filters: + try: containers, subnets = self._load_all_prefixes_filtered( sync_filters, include_ipv4=include_ipv4, include_ipv6=include_ipv6 ) + except requests.exceptions.HTTPError as err: + self.job.logger.error(f"Error while loading prefixes: {str(err)}") + raise AdapterLoadException(str(err)) from err all_networks = containers + subnets self.subnets = [(x["network"], x["network_view"]) for x in subnets] - default_ext_attrs = get_default_ext_attrs(review_list=all_networks) + default_ext_attrs = get_default_ext_attrs(review_list=all_networks, excluded_attrs=self.excluded_attrs) for _pf in all_networks: - pf_ext_attrs = get_ext_attr_dict(extattrs=_pf.get("extattrs", {})) + pf_ext_attrs = get_ext_attr_dict(extattrs=_pf.get("extattrs", {}), excluded_attrs=self.excluded_attrs) new_pf = self.prefix( network=_pf["network"], - namespace=map_network_view_to_namespace(_pf["network_view"]), + namespace=map_network_view_to_namespace(value=_pf["network_view"], direction="nv_to_ns"), description=_pf.get("comment", ""), network_type="network" if _pf in subnets else "container", ext_attrs={**default_ext_attrs, **pf_ext_attrs}, @@ -214,25 +186,27 @@ def load_prefixes(self, include_ipv4: bool, include_ipv6: bool, sync_filters: Op try: self.add(new_pf) except ObjectAlreadyExists: - self.job.logger.warning( - f"Duplicate prefix found: {new_pf}. Duplicate prefixes are not supported, " - "and only the first occurrence will be included in the sync. To load data " - "from a single Network View, use the 'infoblox_network_view' setting." 
- ) + self.job.logger.warning(f"Duplicate prefix found: {new_pf}.") def load_ipaddresses(self): """Load InfobloxIPAddress DiffSync model.""" if self.job.debug: self.job.logger.debug("Loading IP addresses from Infoblox.") - ipaddrs = self.conn.get_all_ipv4address_networks(prefixes=self.subnets) - default_ext_attrs = get_default_ext_attrs(review_list=ipaddrs) + try: + ipaddrs = self.conn.get_all_ipv4address_networks(prefixes=self.subnets) + except requests.exceptions.HTTPError as err: + self.job.logger.error(f"Error while loading IP addresses: {str(err)}") + raise AdapterLoadException(str(err)) from err + + default_ext_attrs = get_default_ext_attrs(review_list=ipaddrs, excluded_attrs=self.excluded_attrs) for _ip in ipaddrs: _, prefix_length = _ip["network"].split("/") dns_name = "" if _ip["names"]: dns_name = get_dns_name(possible_fqdn=_ip["names"][0]) - namespace = map_network_view_to_namespace(_ip["network_view"]) - ip_ext_attrs = get_ext_attr_dict(extattrs=_ip.get("extattrs", {})) + namespace = map_network_view_to_namespace(value=_ip["network_view"], direction="nv_to_ns") + + ip_ext_attrs = get_ext_attr_dict(extattrs=_ip.get("extattrs", {}), excluded_attrs=self.excluded_attrs) new_ip = self.ipaddress( address=_ip["ip_address"], prefix=_ip["network"], @@ -244,19 +218,33 @@ def load_ipaddresses(self): description=_ip["comment"], ext_attrs={**default_ext_attrs, **ip_ext_attrs}, ) - if not loaded: - self.job.logger.warning( - f"Duplicate IP Address {_ip['ip_address']}/{prefix_length} in {_ip['network']} attempting to be loaded." - ) + + # Record references to DNS Records linked to this IP Address + for ref in _ip["objects"]: + obj_type = ref.split("/")[0] + if obj_type == "record:host": + new_ip.has_host_record = True + new_ip.host_record_ref = ref + elif obj_type == "record:a": + new_ip.has_a_record = True + new_ip.a_record_ref = ref + elif obj_type == "record:ptr": + new_ip.has_ptr_record = True + new_ip.ptr_record_ref = ref + + self.add(new_ip) def load_vlanviews(self): """Load InfobloxVLANView DiffSync model.""" - if self.job.debug: - self.job.logger.debug("Loading VLAN Views from Infoblox.") - vlanviews = self.conn.get_vlanviews() - default_ext_attrs = get_default_ext_attrs(review_list=vlanviews) + try: + vlanviews = self.conn.get_vlanviews() + except requests.exceptions.HTTPError as err: + self.job.logger.error(f"Error while loading VLAN views: {str(err)}") + raise AdapterLoadException(str(err)) from err + + default_ext_attrs = get_default_ext_attrs(review_list=vlanviews, excluded_attrs=self.excluded_attrs) for _vv in vlanviews: - vv_ext_attrs = get_ext_attr_dict(extattrs=_vv.get("extattrs", {})) + vv_ext_attrs = get_ext_attr_dict(extattrs=_vv.get("extattrs", {}), excluded_attrs=self.excluded_attrs) new_vv = self.vlangroup( name=_vv["name"], description=_vv["comment"] if _vv.get("comment") else "", @@ -268,10 +256,15 @@ def load_vlans(self): """Load InfobloxVlan DiffSync model.""" if self.job.debug: self.job.logger.debug("Loading VLANs from Infoblox.") - vlans = self.conn.get_vlans() - default_ext_attrs = get_default_ext_attrs(review_list=vlans) + try: + vlans = self.conn.get_vlans() + except requests.exceptions.HTTPError as err: + self.job.logger.error(f"Error while loading VLANs: {str(err)}") + raise AdapterLoadException(str(err)) from err + + default_ext_attrs = get_default_ext_attrs(review_list=vlans, excluded_attrs=self.excluded_attrs) for _vlan in vlans: - vlan_ext_attrs = get_ext_attr_dict(extattrs=_vlan.get("extattrs", {})) + vlan_ext_attrs = 
get_ext_attr_dict(extattrs=_vlan.get("extattrs", {}), excluded_attrs=self.excluded_attrs)
             vlan_group = re.search(r"(?:.+\:)(\S+)(?:\/\S+\/.+)", _vlan["_ref"])
             new_vlan = self.vlan(
                 name=_vlan["name"],
@@ -285,28 +278,20 @@ def load_vlans(self):
 
     def load(self):
         """Load all models by calling other methods."""
-        # Set ipv4 import to True as default
-        include_ipv4 = True
-        sync_filters = PLUGIN_CFG["infoblox_sync_filters"]
-        if "infoblox_import_objects" in PLUGIN_CFG:
-            # Use config setting to decide whether to import ipv6
-            include_ipv6 = PLUGIN_CFG["infoblox_import_objects"].get("subnets_ipv6")
-            self.load_network_views(include_ipv4=include_ipv4, include_ipv6=include_ipv6, sync_filters=sync_filters)
-            if PLUGIN_CFG["infoblox_import_objects"].get("subnets"):
-                self.load_prefixes(sync_filters=sync_filters)
-            if PLUGIN_CFG["infoblox_import_objects"].get("ip_addresses"):
-                self.load_ipaddresses()
-            if PLUGIN_CFG["infoblox_import_objects"].get("vlan_views"):
-                self.load_vlanviews()
-            if PLUGIN_CFG["infoblox_import_objects"].get("vlans"):
-                self.load_vlans()
-        else:
-            self.job.logger.info("The `infoblox_import_objects` setting was not found so all objects will be imported.")
-            self.load_prefixes()
+        include_ipv4 = self.config.import_ipv4
+        include_ipv6 = self.config.import_ipv6
+        sync_filters = self.config.infoblox_sync_filters
+
+        self.load_network_views(sync_filters=sync_filters)
+        if self.config.import_subnets:
+            self.load_prefixes(include_ipv4=include_ipv4, include_ipv6=include_ipv6, sync_filters=sync_filters)
+        if self.config.import_ip_addresses:
             self.load_ipaddresses()
+        if self.config.import_vlan_views:
             self.load_vlanviews()
+        if self.config.import_vlans:
             self.load_vlans()
-        for obj in ["prefix", "ipaddress", "vlangroup", "vlan", "namespace"]:
+        for obj in ["namespace", "prefix", "ipaddress", "vlangroup", "vlan"]:
         if obj in self.dict():
             self.job.logger.info(f"Loaded {len(self.dict()[obj])} {obj} from Infoblox.")

From 9ac722642af144b8d9f11b31d04206f994c1c495 Mon Sep 17 00:00:00 2001
From: Przemek Rogala
Date: Fri, 17 May 2024 20:28:18 +0100
Subject: [PATCH 040/229] - Allow excluding attributes/cfs from diffsync
 comparison.
- Add support for adding IP as a/ptr record.
- Fix bug in loading filtered prefixes/ip addresses.
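One detail worth spelling out in the adapter above: `load_ipaddresses` decides which DNS record flags to set by inspecting the WAPI references attached to each address. A reference encodes its object type before the first `/`, so the classification reduces to a string split (the ref below is taken from the fixtures added later in this series):

    ref = "record:a/ZG5zLmJpbmRfYSQuX2RlZmF1bHQudGVzdC5uYXV0b2JvdCx0ZXN0ZG5zbmFtZSwxMC4wLjAuMQ:testdnsname.nautobot.test/default"

    # Everything before the first "/" names the WAPI object type.
    obj_type = ref.split("/")[0]
    assert obj_type == "record:a"

A `record:host` ref sets `has_host_record`, `record:a` sets `has_a_record`, and `record:ptr` sets `has_ptr_record`, with the ref itself stored for later update calls.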
--- .../infoblox/diffsync/adapters/nautobot.py | 111 ++++++++++-------- 1 file changed, 64 insertions(+), 47 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py index e09613d67..f926c81ab 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py @@ -13,7 +13,7 @@ from nautobot.ipam.models import VLAN, IPAddress, Namespace, Prefix, VLANGroup from nautobot.tenancy.models import Tenant -from nautobot_ssot.integrations.infoblox.constant import PLUGIN_CFG, TAG_COLOR +from nautobot_ssot.integrations.infoblox.constant import TAG_COLOR from nautobot_ssot.integrations.infoblox.diffsync.models import ( NautobotIPAddress, NautobotNamespace, @@ -23,6 +23,7 @@ ) from nautobot_ssot.integrations.infoblox.utils.diffsync import ( get_default_custom_fields, + get_valid_custom_fields, map_network_view_to_namespace, nautobot_vlan_status, ) @@ -114,16 +115,19 @@ class NautobotAdapter(NautobotMixin, DiffSync): # pylint: disable=too-many-inst vlan_map = {} vlangroup_map = {} - def __init__(self, *args, job=None, sync=None, **kwargs): + def __init__(self, *args, job=None, sync=None, config, **kwargs): """Initialize Nautobot. Args: job (object, optional): Nautobot job. Defaults to None. sync (object, optional): Nautobot DiffSync. Defaults to None. + config (object): Infoblox config object. """ super().__init__(*args, **kwargs) self.job = job self.sync = sync + self.config = config + self.excluded_cfs = config.cf_fields_ignore.get("custom_fields", []) def sync_complete(self, source: DiffSync, *args, **kwargs): """Process object creations/updates using bulk operations. @@ -137,9 +141,7 @@ def _get_namespaces_from_sync_filters(self, sync_filters: list) -> set: """Get namespaces defined in filters.""" namespaces = set() for sync_filter in sync_filters: - if "network_view" not in sync_filter: - continue - namespace_name = map_network_view_to_namespace(sync_filter["network_view"]) + namespace_name = map_network_view_to_namespace(value=sync_filter["network_view"], direction="nv_to_ns") namespaces.add(namespace_name) return namespaces @@ -154,12 +156,15 @@ def load_namespaces(self, sync_filters: Optional[list] = None): else: all_namespaces = Namespace.objects.all() - default_cfs = get_default_custom_fields(cf_contenttype=ContentType.objects.get_for_model(Namespace)) + default_cfs = get_default_custom_fields( + cf_contenttype=ContentType.objects.get_for_model(Namespace), excluded_cfs=self.excluded_cfs + ) for namespace in all_namespaces: self.namespace_map[namespace.name] = namespace.id + custom_fields = get_valid_custom_fields(namespace.custom_field_data, excluded_cfs=self.excluded_cfs) _namespace = self.namespace( name=namespace.name, - ext_attrs={**default_cfs, **namespace.custom_field_data}, + ext_attrs={**default_cfs, **custom_fields}, pk=namespace.id, ) try: @@ -182,46 +187,45 @@ def _load_all_prefixes_filtered(self, sync_filters: list, include_ipv4: bool, in for sync_filter in sync_filters: query_filters = {} if "network_view" in sync_filter: - query_filters["namespace__name"] = sync_filter["network_view"] + namespace = map_network_view_to_namespace(sync_filter["network_view"], direction="nv_to_ns") + query_filters["namespace__name"] = namespace if "prefixes_ipv4" in sync_filter and include_ipv4: for pfx_ipv4 in sync_filter["prefixes_ipv4"]: query_filters["network__net_contained_or_equal"] = pfx_ipv4 - 
all_prefixes.union(Prefix.objects.filter(**query_filters)) + all_prefixes = all_prefixes.union(Prefix.objects.filter(**query_filters)) if "prefixes_ipv6" in sync_filter and include_ipv6: for pfx_ipv6 in sync_filter["prefixes_ipv6"]: query_filters["network__net_contained_or_equal"] = pfx_ipv6 - all_prefixes.union(Prefix.objects.filter(**query_filters)) + all_prefixes = all_prefixes.union(Prefix.objects.filter(**query_filters)) # Filter on namespace name only if "prefixes_ipv4" not in sync_filter and "prefixes_ipv6" not in sync_filter: - all_prefixes.union(Prefix.objects.filter(**query_filters)) + all_prefixes = all_prefixes.union(Prefix.objects.filter(**query_filters)) return all_prefixes def load_prefixes(self, include_ipv4: bool, include_ipv6: bool, sync_filters: Optional[list]): """Load Prefixes from Nautobot.""" - if not sync_filters: - all_prefixes = Prefix.objects.all() - else: - all_prefixes = self._load_all_prefixes_filtered( - sync_filters=sync_filters, include_ipv4=include_ipv4, include_ipv6=include_ipv6 - ) + all_prefixes = self._load_all_prefixes_filtered( + sync_filters=sync_filters, include_ipv4=include_ipv4, include_ipv6=include_ipv6 + ) - default_cfs = get_default_custom_fields(cf_contenttype=ContentType.objects.get_for_model(Prefix)) + default_cfs = get_default_custom_fields( + cf_contenttype=ContentType.objects.get_for_model(Prefix), excluded_cfs=self.excluded_cfs + ) for prefix in all_prefixes: self.prefix_map[(prefix.namespace.name), str(prefix.prefix)] = prefix.id - if "ssot_synced_to_infoblox" in prefix.custom_field_data: - prefix.custom_field_data.pop("ssot_synced_to_infoblox") + dhcp_ranges = prefix.cf.get("dhcp_ranges") current_vlans = get_prefix_vlans(prefix=prefix) + custom_fields = get_valid_custom_fields(prefix.custom_field_data, excluded_cfs=self.excluded_cfs) _prefix = self.prefix( network=str(prefix.prefix), namespace=prefix.namespace.name, description=prefix.description, network_type=prefix.type, - ext_attrs={**default_cfs, **prefix.custom_field_data}, + ext_attrs={**default_cfs, **custom_fields}, vlans=build_vlan_map_from_relations(vlans=current_vlans), pk=prefix.id, ) - dhcp_ranges = prefix.cf.get("dhcp_ranges") if dhcp_ranges: _prefix.ranges = dhcp_ranges.split(",") try: @@ -244,28 +248,28 @@ def _load_all_ipaddresses_filtered(self, sync_filters: list, include_ipv4: bool, for sync_filter in sync_filters: query_filters = {} if "network_view" in sync_filter: - query_filters["parent__namespace__name"] = sync_filter["network_view"] + namespace = map_network_view_to_namespace(sync_filter["network_view"], direction="nv_to_ns") + query_filters["parent__namespace__name"] = namespace if "prefixes_ipv4" in sync_filter and include_ipv4: query_filters["host__net_in"] = sync_filter["prefixes_ipv4"] - all_ipaddresses.union(IPAddress.objects.filter(**query_filters)) + all_ipaddresses = all_ipaddresses.union(IPAddress.objects.filter(**query_filters)) if "prefixes_ipv6" in sync_filter and include_ipv6: query_filters["host__net_in"] = sync_filter["prefixes_ipv6"] - all_ipaddresses.union(IPAddress.objects.filter(**query_filters)) + all_ipaddresses = all_ipaddresses.union(IPAddress.objects.filter(**query_filters)) # Filter on namespace name only if "prefixes_ipv4" not in sync_filter and "prefixes_ipv6" not in sync_filter: - all_ipaddresses.union(IPAddress.objects.filter(**query_filters)) + all_ipaddresses = all_ipaddresses.union(IPAddress.objects.filter(**query_filters)) return all_ipaddresses def load_ipaddresses(self, include_ipv4: bool, include_ipv6: bool, sync_filters: 
list): """Load IP Addresses from Nautobot.""" - default_cfs = get_default_custom_fields(cf_contenttype=ContentType.objects.get_for_model(IPAddress)) - if not sync_filters: - all_ipaddresses = IPAddress.objects.all() - else: - all_ipaddresses = self._load_all_ipaddresses_filtered( - sync_filters=sync_filters, include_ipv4=include_ipv4, include_ipv6=include_ipv6 - ) + default_cfs = get_default_custom_fields( + cf_contenttype=ContentType.objects.get_for_model(IPAddress), excluded_cfs=self.excluded_cfs + ) + all_ipaddresses = self._load_all_ipaddresses_filtered( + sync_filters=sync_filters, include_ipv4=include_ipv4, include_ipv6=include_ipv6 + ) for ipaddr in all_ipaddresses: self.ipaddr_map[str(ipaddr.address)] = ipaddr.id addr = ipaddr.host @@ -286,8 +290,7 @@ def load_ipaddresses(self, include_ipv4: bool, include_ipv6: bool, sync_filters: ) continue - if "ssot_synced_to_infoblox" in ipaddr.custom_field_data: - ipaddr.custom_field_data.pop("ssot_synced_to_infoblox") + custom_fields = get_valid_custom_fields(ipaddr.custom_field_data, excluded_cfs=self.excluded_cfs) _ip = self.ipaddress( address=addr, prefix=str(prefix), @@ -297,9 +300,20 @@ def load_ipaddresses(self, include_ipv4: bool, include_ipv6: bool, sync_filters: prefix_length=prefix.prefix_length if prefix else ipaddr.prefix_length, dns_name=ipaddr.dns_name, description=ipaddr.description, - ext_attrs={**default_cfs, **ipaddr.custom_field_data}, + ext_attrs={**default_cfs, **custom_fields}, pk=ipaddr.id, ) + + # Pretend IP Address has matching DNS records if dns name is defined. + # This will be compared against values set on Infoblox side. + if ipaddr.dns_name: + if self.config.create_host_record: + _ip.has_host_record = True + elif self.config.create_a_record: + _ip.has_a_record = True + if self.config.create_ptr_record: + _ip.has_ptr_record = True + try: self.add(_ip) except ObjectAlreadyExists: @@ -307,46 +321,49 @@ def load_ipaddresses(self, include_ipv4: bool, include_ipv6: bool, sync_filters: def load_vlangroups(self): """Load VLAN Groups from Nautobot.""" - default_cfs = get_default_custom_fields(cf_contenttype=ContentType.objects.get_for_model(VLANGroup)) + default_cfs = get_default_custom_fields( + cf_contenttype=ContentType.objects.get_for_model(VLANGroup), excluded_cfs=self.excluded_cfs + ) for grp in VLANGroup.objects.all(): self.vlangroup_map[grp.name] = grp.id - if "ssot_synced_to_infoblox" in grp.custom_field_data: - grp.custom_field_data.pop("ssot_synced_to_infoblox") + custom_fields = get_valid_custom_fields(grp.custom_field_data, excluded_cfs=self.excluded_cfs) _vg = self.vlangroup( name=grp.name, description=grp.description, - ext_attrs={**default_cfs, **grp.custom_field_data}, + ext_attrs={**default_cfs, **custom_fields}, pk=grp.id, ) self.add(_vg) def load_vlans(self): """Load VLANs from Nautobot.""" - default_cfs = get_default_custom_fields(cf_contenttype=ContentType.objects.get_for_model(VLAN)) + default_cfs = get_default_custom_fields( + cf_contenttype=ContentType.objects.get_for_model(VLAN), excluded_cfs=self.excluded_cfs + ) # To ensure we are only dealing with VLANs imported from Infoblox we need to filter to those with a # VLAN Group assigned to match how Infoblox requires a VLAN View to be associated to VLANs. 
for vlan in VLAN.objects.filter(vlan_group__isnull=False):
             if vlan.vlan_group.name not in self.vlan_map:
                 self.vlan_map[vlan.vlan_group.name] = {}
             self.vlan_map[vlan.vlan_group.name][vlan.vid] = vlan.id
-            if "ssot_synced_to_infoblox" in vlan.custom_field_data:
-                vlan.custom_field_data.pop("ssot_synced_to_infoblox")
+            custom_fields = get_valid_custom_fields(vlan.custom_field_data, excluded_cfs=self.excluded_cfs)
             _vlan = self.vlan(
                 vid=vlan.vid,
                 name=vlan.name,
                 description=vlan.description,
                 vlangroup=vlan.vlan_group.name if vlan.vlan_group else "",
                 status=nautobot_vlan_status(vlan.status.name),
-                ext_attrs={**default_cfs, **vlan.custom_field_data},
+                ext_attrs={**default_cfs, **custom_fields},
                 pk=vlan.id,
             )
             self.add(_vlan)
 
     def load(self):
         """Load models with data from Nautobot."""
-        include_ipv4 = True
-        include_ipv6 = PLUGIN_CFG.get("infoblox_import_objects", {}).get("subnets_ipv6", False)
-        sync_filters = PLUGIN_CFG["infoblox_sync_filters"]
+        include_ipv4 = self.config.import_ipv4
+        include_ipv6 = self.config.import_ipv6
+        sync_filters = self.config.infoblox_sync_filters
+
         self.relationship_map = {r.label: r.id for r in Relationship.objects.only("id", "label")}
         self.status_map = {s.name: s.id for s in Status.objects.only("id", "name")}
         self.location_map = {loc.name: loc.id for loc in Location.objects.only("id", "name")}

From b9e4da9a658b2c0266e4e71448b4f074f2f06b4e Mon Sep 17 00:00:00 2001
From: Przemek Rogala
Date: Fri, 17 May 2024 20:31:37 +0100
Subject: [PATCH 041/229] Add API for SSOTInfobloxConfig.

---
 .../integrations/infoblox/api/__init__.py | 1 +
 .../integrations/infoblox/api/serializers.py | 15 +++++++++++++++
 nautobot_ssot/integrations/infoblox/api/urls.py | 12 ++++++++++++
 nautobot_ssot/integrations/infoblox/api/views.py | 15 +++++++++++++++
 4 files changed, 43 insertions(+)
 create mode 100644 nautobot_ssot/integrations/infoblox/api/__init__.py
 create mode 100644 nautobot_ssot/integrations/infoblox/api/serializers.py
 create mode 100644 nautobot_ssot/integrations/infoblox/api/urls.py
 create mode 100644 nautobot_ssot/integrations/infoblox/api/views.py

diff --git a/nautobot_ssot/integrations/infoblox/api/__init__.py b/nautobot_ssot/integrations/infoblox/api/__init__.py
new file mode 100644
index 000000000..777ad3b40
--- /dev/null
+++ b/nautobot_ssot/integrations/infoblox/api/__init__.py
@@ -0,0 +1 @@
+"""REST API module for nautobot_ssot infoblox integration."""
diff --git a/nautobot_ssot/integrations/infoblox/api/serializers.py b/nautobot_ssot/integrations/infoblox/api/serializers.py
new file mode 100644
index 000000000..c1109dafb
--- /dev/null
+++ b/nautobot_ssot/integrations/infoblox/api/serializers.py
@@ -0,0 +1,15 @@
+"""API serializers for nautobot_ssot infoblox."""
+
+from nautobot.apps.api import NautobotModelSerializer
+
+from nautobot_ssot.integrations.infoblox.models import SSOTInfobloxConfig
+
+
+class SSOTInfobloxConfigSerializer(NautobotModelSerializer):  # pylint: disable=too-many-ancestors
+    """REST API serializer for SSOTInfobloxConfig records."""
+
+    class Meta:
+        """Meta attributes."""
+
+        model = SSOTInfobloxConfig
+        fields = "__all__"
diff --git a/nautobot_ssot/integrations/infoblox/api/urls.py b/nautobot_ssot/integrations/infoblox/api/urls.py
new file mode 100644
index 000000000..625540967
--- /dev/null
+++ b/nautobot_ssot/integrations/infoblox/api/urls.py
@@ -0,0 +1,12 @@
+"""Django urlpatterns declaration for nautobot_ssot infoblox API."""
+
+from rest_framework import routers
+
+from nautobot_ssot.integrations.infoblox.api.views import SSOTInfobloxConfigView
+ +router = routers.DefaultRouter() + +router.register("config/infoblox", SSOTInfobloxConfigView) +app_name = "ssot" # pylint: disable=invalid-name + +urlpatterns = router.urls diff --git a/nautobot_ssot/integrations/infoblox/api/views.py b/nautobot_ssot/integrations/infoblox/api/views.py new file mode 100644 index 000000000..65408758b --- /dev/null +++ b/nautobot_ssot/integrations/infoblox/api/views.py @@ -0,0 +1,15 @@ +"""API views for nautobot_ssot infoblox.""" + +from nautobot.apps.api import NautobotModelViewSet + +from nautobot_ssot.integrations.infoblox.filters import SSOTInfobloxConfigFilterSet +from nautobot_ssot.integrations.infoblox.models import SSOTInfobloxConfig +from .serializers import SSOTInfobloxConfigSerializer + + +class SSOTInfobloxConfigView(NautobotModelViewSet): # pylint: disable=too-many-ancestors + """API CRUD operations set for the SSOTInfobloxConfig view.""" + + queryset = SSOTInfobloxConfig.objects.all() + filterset_class = SSOTInfobloxConfigFilterSet + serializer_class = SSOTInfobloxConfigSerializer From 1541af4a90e9ac446bd26fa372067543b5a600b2 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Fri, 17 May 2024 20:32:29 +0100 Subject: [PATCH 042/229] Add fixtures. --- .../fixtures/get_a_record_by_ref.json | 6 ++++ .../fixtures/get_all_network_views.json | 35 +++++++++++++++++++ .../get_authoritative_zones_for_dns_view.json | 14 ++++++++ .../infoblox/fixtures/get_network_view.json | 13 +++++++ .../fixtures/get_ptr_record_by_ip.json | 16 +++++++++ .../fixtures/get_ptr_record_by_ref.json | 8 +++++ 6 files changed, 92 insertions(+) create mode 100644 nautobot_ssot/tests/infoblox/fixtures/get_a_record_by_ref.json create mode 100644 nautobot_ssot/tests/infoblox/fixtures/get_all_network_views.json create mode 100644 nautobot_ssot/tests/infoblox/fixtures/get_authoritative_zones_for_dns_view.json create mode 100644 nautobot_ssot/tests/infoblox/fixtures/get_network_view.json create mode 100644 nautobot_ssot/tests/infoblox/fixtures/get_ptr_record_by_ip.json create mode 100644 nautobot_ssot/tests/infoblox/fixtures/get_ptr_record_by_ref.json diff --git a/nautobot_ssot/tests/infoblox/fixtures/get_a_record_by_ref.json b/nautobot_ssot/tests/infoblox/fixtures/get_a_record_by_ref.json new file mode 100644 index 000000000..88371fd9a --- /dev/null +++ b/nautobot_ssot/tests/infoblox/fixtures/get_a_record_by_ref.json @@ -0,0 +1,6 @@ +{ + "_ref": "record:a/ZG5zLmJpbmRfYSQuX2RlZmF1bHQudGVzdC5uYXV0b2JvdCx0ZXN0ZG5zbmFtZSwxMC4wLjAuMQ:testdnsname.nautobot.test/default", + "ipv4addr": "10.0.0.1", + "name": "testdnsname.nautobot.test", + "view": "default" +} \ No newline at end of file diff --git a/nautobot_ssot/tests/infoblox/fixtures/get_all_network_views.json b/nautobot_ssot/tests/infoblox/fixtures/get_all_network_views.json new file mode 100644 index 000000000..05b99b369 --- /dev/null +++ b/nautobot_ssot/tests/infoblox/fixtures/get_all_network_views.json @@ -0,0 +1,35 @@ +[ + { + "_ref": "networkview/ZG5zLm5ldHdvcmtfdmlldyQw:default/true", + "associated_dns_views": [ + "default" + ], + "extattrs": { + + }, + "is_default": true, + "name": "default" + }, + { + "_ref": "networkview/ZG5zLm5ldHdvcmtfdmlldyQx:prod/false", + "associated_dns_views": [ + "default.prod" + ], + "extattrs": { + + }, + "is_default": false, + "name": "prod" + }, + { + "_ref": "networkview/ZG5zLm5ldHdvcmtfdmlldyQy:dev/false", + "associated_dns_views": [ + "default.dev" + ], + "extattrs": { + + }, + "is_default": false, + "name": "dev" + } +] \ No newline at end of file diff --git 
a/nautobot_ssot/tests/infoblox/fixtures/get_authoritative_zones_for_dns_view.json b/nautobot_ssot/tests/infoblox/fixtures/get_authoritative_zones_for_dns_view.json new file mode 100644 index 000000000..3eab1e3ac --- /dev/null +++ b/nautobot_ssot/tests/infoblox/fixtures/get_authoritative_zones_for_dns_view.json @@ -0,0 +1,14 @@ +{ + "result": [ + { + "_ref": "zone_auth/ZG5zLnpvbmUkLjIuYXJwYS5pbi1hZGRy:0.0.0.0%2F0/default.dev", + "fqdn": "0.0.0.0/0", + "view": "default.dev" + }, + { + "_ref": "zone_auth/ZG5zLnpvbmUkLjIudGVzdC5sb2NhbC5uYXV0b2JvdA:nautobot.local.test/default.dev", + "fqdn": "nautobot.local.test", + "view": "default.dev" + } + ] +} \ No newline at end of file diff --git a/nautobot_ssot/tests/infoblox/fixtures/get_network_view.json b/nautobot_ssot/tests/infoblox/fixtures/get_network_view.json new file mode 100644 index 000000000..8f032a17f --- /dev/null +++ b/nautobot_ssot/tests/infoblox/fixtures/get_network_view.json @@ -0,0 +1,13 @@ +[ + { + "_ref": "networkview/ZG5zLm5ldHdvcmtfdmlldyQy:dev/false", + "associated_dns_views": [ + "default.dev" + ], + "extattrs": { + + }, + "is_default": false, + "name": "dev" + } +] \ No newline at end of file diff --git a/nautobot_ssot/tests/infoblox/fixtures/get_ptr_record_by_ip.json b/nautobot_ssot/tests/infoblox/fixtures/get_ptr_record_by_ip.json new file mode 100644 index 000000000..0acc7e1dc --- /dev/null +++ b/nautobot_ssot/tests/infoblox/fixtures/get_ptr_record_by_ip.json @@ -0,0 +1,16 @@ +{ + "result": [ + { + "_ref": "record:ptr/ZG5zLmJpbmRfcHRyJC4yLmFycGEuaW4tYWRkci4xMC4wLjAuMS5ob3N0MS5uYXV0b2JvdC5sb2NhbC50ZXN0:1.0.0.10.in-addr.arpa/default.dev", + "extattrs": { + + }, + "ipv4addr": "10.0.0.1", + "ipv6addr": "", + "name": "1.0.0.10.in-addr.arpa", + "ptrdname": "host1.nautobot.local.test", + "view": "default.dev", + "zone": "in-addr.arpa" + } + ] +} \ No newline at end of file diff --git a/nautobot_ssot/tests/infoblox/fixtures/get_ptr_record_by_ref.json b/nautobot_ssot/tests/infoblox/fixtures/get_ptr_record_by_ref.json new file mode 100644 index 000000000..9b4580ea9 --- /dev/null +++ b/nautobot_ssot/tests/infoblox/fixtures/get_ptr_record_by_ref.json @@ -0,0 +1,8 @@ +{ + "_ref": "record:ptr/ZG5zLmJpbmRfcHRyJC4yLmFycGEuaW4tYWRkci4xMC4wLjAuMS5ob3N0MS5uYXV0b2JvdC5sb2NhbC50ZXN0:1.0.0.10.in-addr.arpa/default.dev", + "ipv4addr": "10.0.0.1", + "ipv6addr": "", + "name": "1.0.0.10.in-addr.arpa", + "ptrdname": "host1.nautobot.local.test", + "view": "default.dev" +} \ No newline at end of file From 7a308e43cf13aae15f4ff23b5a9a81ac5f8eac85 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Fri, 17 May 2024 20:34:06 +0100 Subject: [PATCH 043/229] Add support for creating IPs as a/ptr records. 
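The network view fixtures above also document the relationship the client relies on: a network view lists its DNS views in `associated_dns_views`, and the default DNS view is simply the first entry. Read back from the fixture file (illustrative; path as created in the patch above):

    import json

    with open("nautobot_ssot/tests/infoblox/fixtures/get_network_view.json", encoding="utf-8") as fixture:
        network_view = json.load(fixture)

    # get_default_dns_view_for_network_view("dev") resolves to the first associated view.
    assert network_view[0]["associated_dns_views"][0] == "default.dev"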
---
 .../infoblox/diffsync/models/base.py | 18 +++++++++++++++++-
 1 file changed, 17 insertions(+), 1 deletion(-)

diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/base.py b/nautobot_ssot/integrations/infoblox/diffsync/models/base.py
index 7579f270e..1077cafe7 100644
--- a/nautobot_ssot/integrations/infoblox/diffsync/models/base.py
+++ b/nautobot_ssot/integrations/infoblox/diffsync/models/base.py
@@ -68,7 +68,16 @@ class IPAddress(DiffSyncModel):
 
     _modelname = "ipaddress"
     _identifiers = ("address", "prefix", "prefix_length", "namespace")
-    _attributes = ("description", "dns_name", "status", "ip_addr_type", "ext_attrs")
+    _attributes = (
+        "description",
+        "dns_name",
+        "status",
+        "ip_addr_type",
+        "ext_attrs",
+        "has_host_record",
+        "has_a_record",
+        "has_ptr_record",
+    )
 
     address: str
     dns_name: str
@@ -79,4 +88,11 @@ class IPAddress(DiffSyncModel):
     ip_addr_type: Optional[str]
     description: Optional[str]
     ext_attrs: Optional[dict]
+    has_a_record: bool = False
+    has_host_record: bool = False
+    has_ptr_record: bool = False
+
     pk: Optional[uuid.UUID] = None
+    a_record_ref: Optional[str] = None
+    host_record_ref: Optional[str] = None
+    ptr_record_ref: Optional[str] = None

From e30af024abdcfd9856c259e45f3a782252fceb19 Mon Sep 17 00:00:00 2001
From: Przemek Rogala
Date: Fri, 17 May 2024 20:39:22 +0100
Subject: [PATCH 044/229] - Allow creating IP Address as host or a with
 optional ptr record.
- Don't allow changes to Network View in Infoblox.

---
 .../infoblox/diffsync/models/infoblox.py | 145 ++++++++++++++++--
 1 file changed, 133 insertions(+), 12 deletions(-)

diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py
index 5e036cefb..265279210 100644
--- a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py
+++ b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py
@@ -2,6 +2,7 @@
 from requests.exceptions import HTTPError
 
 from nautobot_ssot.integrations.infoblox.diffsync.models.base import Namespace, Network, IPAddress, Vlan, VlanView
+from nautobot_ssot.integrations.infoblox.utils.diffsync import map_network_view_to_namespace, validate_dns_name
 
 
 class InfobloxNetwork(Network):
@@ -41,7 +42,7 @@ def update(self, attrs):
         if attrs.get("ranges"):
             self.diffsync.job.logger.warning(
                 f"Prefix, {self.network}, has a change of Ranges in Nautobot, but"
-                "updating InfoBlox with Ranges is currently not supported."
+                " updating Ranges in InfoBlox is currently not supported."
             )
         return super().update(attrs)
@@ -81,23 +82,123 @@ class InfobloxIPAddress(IPAddress):
 
     @classmethod
     def create(cls, diffsync, ids, attrs):
-        """Create either a host record or fixed address (Not implemented).
+        """Create either a Host record or an A record.
 
+        Optionally creates a PTR record in addition to an A record.
+
-        This requires the IP Address to either have a DNS name
+        This requires the IP Address to have a DNS name
         """
-        if attrs["dns_name"]:
-            diffsync.conn.create_host_record(attrs["dns_name"], ids["address"])
+        network_view = map_network_view_to_namespace(value=ids["namespace"], direction="ns_to_nv")
+        dns_name = attrs.get("dns_name")
+        ip_address = ids["address"]
+        diffsync.job.logger.debug(f"IP Address {ip_address}. DNS name: {dns_name}.")
+        if not dns_name:
+            diffsync.job.logger.warning(
+                f"Cannot create Infoblox record for IP Address {ip_address}. DNS name is not defined."
+            )
+            return super().create(ids=ids, diffsync=diffsync, attrs=attrs)
+
+        # Nautobot side doesn't check if dns name is a FQDN.
Additionally, Infoblox won't accept DNS name if the corresponding zone FQDN doesn't exist. + if not validate_dns_name(diffsync.conn, dns_name, network_view): + diffsync.job.logger.warning(f"Invalid zone fqdn in DNS name `{dns_name}` for IP Address {ip_address}") + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + + if diffsync.config.create_a_record: + diffsync.conn.create_a_record(dns_name, ip_address, network_view=network_view) + # Only create PTR records if A record has been created + if diffsync.config.create_ptr_record: + diffsync.conn.create_ptr_record(dns_name, ip_address, network_view=network_view) + elif diffsync.config.create_host_record: + diffsync.conn.create_host_record(dns_name, ip_address) return super().create(ids=ids, diffsync=diffsync, attrs=attrs) - def update(self, attrs): + def update(self, attrs): # pylint: disable=too-many-branches """Update IP Address object in Infoblox.""" - json = {"configure_for_dns": False} + ids = self.get_identifiers() + inf_attrs = self.get_attrs() + ip_address = ids["address"] + network_view = map_network_view_to_namespace(value=ids["namespace"], direction="ns_to_nv") + payload = {} if attrs.get("description"): - json.update({"comment": attrs["description"]}) + payload.update({"comment": attrs["description"]}) if attrs.get("dns_name"): - json.update({"name": attrs["dns_name"]}) - if json: - self.diffsync.conn.update_ipaddress(ip_address=self.get_identifiers()["address"], data=json) + payload.update({"name": attrs["dns_name"]}) + + # Nautobot side doesn't check if dns name is fqdn. Additionally, Infoblox won't allow dns name if the zone fqdn doesn't exist. + # We get either existing DNS name, or a new one. This is because name might be the same but we need to create a PTR record. + dns_name = attrs.get("dns_name", inf_attrs["dns_name"]) + if not dns_name: + self.diffsync.job.logger.warning( + f"Cannot update Infoblox record for IP Address {ip_address}. DNS name is not defined." + ) + return super().update(attrs) + if not validate_dns_name(self.diffsync.conn, dns_name, network_view): + self.diffsync.job.logger.warning(f"Invalid zone fqdn in DNS name `{dns_name}` for IP Address {ip_address}") + return super().update(attrs) + + # Infoblox Host record acts as a combined A/PTR record. + # Only allow creating/updating A and PTR record if IP Address doesn't have a corresponding Host record. + # Only allows creating/updating Host record if IP Address doesn't have a corresponding A or PTR record. + incompatible_record_types = False + if attrs.get("has_a_record", False) and self.diffsync.config.create_a_record and inf_attrs["has_host_record"]: + incomp_msg = ( + f"Cannot create/update A Record for IP Address, {ip_address}. It already has an existing Host Record." + ) + incompatible_record_types = True + elif ( + attrs.get("has_ptr_record", False) + and self.diffsync.config.create_ptr_record + and inf_attrs["has_host_record"] + ): + incomp_msg = ( + f"Cannot create/update PTR Record for IP Address, {ip_address}. It already has an existing Host Record" + ) + incompatible_record_types = True + elif ( + attrs.get("has_host_record", False) + and self.diffsync.config.create_host_record + and inf_attrs["has_a_record"] + ): + incomp_msg = ( + f"Cannot create/update Host Record for IP Address, {ip_address}. 
It already has an existing A Record" + ) + incompatible_record_types = True + elif ( + attrs.get("has_host_record", False) + and self.diffsync.config.create_host_record + and inf_attrs["has_ptr_record"] + ): + incomp_msg = ( + f"Cannot create/update Host Record for IP Address, {ip_address}. It already has an existing PTR Record" + ) + incompatible_record_types = True + + if incompatible_record_types: + self.diffsync.job.logger.warning(incomp_msg) + return super().update(attrs) + + a_record_action = "none" + ptr_record_action = "none" + host_record_action = "none" + if self.diffsync.config.create_a_record and inf_attrs["has_a_record"]: + a_record_action = "update" + if self.diffsync.config.create_ptr_record: + ptr_record_action = "update" if inf_attrs["has_ptr_record"] else "create" + elif self.diffsync.config.create_host_record and inf_attrs["has_host_record"]: + host_record_action = "update" + + # IP Address in Infoblox is not a plain IP Address like in Nautobot. + # In Infoblox we can fixed_address (not supported here), Host record for IP Address, or A Record for IP Address. + # When syncing from Nautobot to Infoblox we take IP Address and check if it has dns_name field populated. + # We then combine this with the Infoblox Config toggles to arrive at the desired state in Infoblox. + if host_record_action == "update" and payload: + self.diffsync.conn.update_host_record(ref=self.host_record_ref, data=payload) + if a_record_action == "update" and payload: + self.diffsync.conn.update_a_record(ref=self.a_record_ref, data=payload) + if ptr_record_action == "update" and payload: + self.diffsync.conn.update_ptr_record(ref=self.ptr_record_ref, data=payload) + elif ptr_record_action == "create": + self.diffsync.conn.create_ptr_record(dns_name, ip_address, network_view=network_view) return super().update(attrs) # def delete(self): @@ -109,4 +210,24 @@ def update(self, attrs): class InfobloxNamespace(Namespace): """Infoblox implementation of the Namespace model.""" - # Currently there are no plans to modify Network Views in Infoblox + @classmethod + def create(cls, diffsync, ids, attrs): + """Don't allow creating Network Views in Infoblox.""" + diffsync.job.logger.error( + f"Creating Network Views in Infoblox is not allowed. Nautobot Namespace: {ids['name']}" + ) + raise NotImplementedError + + def update(self, attrs): + """Don't allow updating Network Views in Infoblox.""" + self.diffsync.job.logger.error( + f"Updating Network Views in Infoblox is not allowed. Nautobot Namespace: {self.get_identifiers()['name']}" + ) + raise NotImplementedError + + def delete(self): + """Don't allow deleting Network Views in Infoblox.""" + self.diffsync.job.logger.error( + f"Deleting Network Views in Infoblox is not allowed. Nautobot Namespace: {self.get_identifiers()['name']}" + ) + raise NotImplementedError From 674015e5746a9387e58c15dc2f6f006fc2f71f66 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Fri, 17 May 2024 20:40:46 +0100 Subject: [PATCH 045/229] Update tests. 
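The `update()` method above enforces one central rule: an Infoblox Host record already behaves as a combined A/PTR record, so the sync refuses to mix record families on a single IP. Reduced to a standalone predicate (a sketch of the rule, not the shipped code):

    def records_compatible(wants_host: bool, wants_a_or_ptr: bool,
                           has_host: bool, has_a_or_ptr: bool) -> bool:
        """A Host record may not coexist with A/PTR records for the same IP."""
        if wants_a_or_ptr and has_host:
            return False
        if wants_host and has_a_or_ptr:
            return False
        return True

    # Trying to layer an A record over an existing Host record is rejected.
    assert records_compatible(False, True, True, False) is False

When the combination is allowed, existing records are updated in place, and a missing PTR record is created alongside an existing A record.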
--- .../tests/infoblox/fixtures_infoblox.py | 112 ++++++++++- nautobot_ssot/tests/infoblox/test_client.py | 174 +++++++++++++++++- .../tests/infoblox/test_infoblox_adapter.py | 59 +++--- .../tests/infoblox/test_nautobot_adapter.py | 16 +- .../tests/infoblox/test_tags_and_cfs.py | 36 +++- 5 files changed, 353 insertions(+), 44 deletions(-) diff --git a/nautobot_ssot/tests/infoblox/fixtures_infoblox.py b/nautobot_ssot/tests/infoblox/fixtures_infoblox.py index 2b05e7f91..c1cc2f6ca 100644 --- a/nautobot_ssot/tests/infoblox/fixtures_infoblox.py +++ b/nautobot_ssot/tests/infoblox/fixtures_infoblox.py @@ -6,11 +6,17 @@ import os from django.contrib.contenttypes.models import ContentType -from nautobot.extras.choices import RelationshipTypeChoices -from nautobot.extras.models import Relationship +from nautobot.extras.choices import ( + RelationshipTypeChoices, + SecretsGroupAccessTypeChoices, + SecretsGroupSecretTypeChoices, +) +from nautobot.extras.models import ExternalIntegration, Relationship, Secret, SecretsGroup, SecretsGroupAssociation, Status from nautobot.ipam.models import Prefix, VLAN + from nautobot_ssot.integrations.infoblox.utils import client +from nautobot_ssot.integrations.infoblox.models import SSOTInfobloxConfig FIXTURES = os.environ.get("FIXTURE_DIR", "nautobot_ssot/tests/infoblox/fixtures") @@ -23,10 +29,80 @@ def _json_read_fixture(name): return json.load(fixture) +def create_default_infoblox_config(infoblox_url="infoblox.example.com"): + default_status = Status.objects.get(name="Active") + infoblox_sync_filters = [{"network_view": "default"}] + secrets_group, _ = SecretsGroup.objects.get_or_create(name="InfobloxSSOTUnitTesting") + infoblox_username, _ = Secret.objects.get_or_create( + name="Infoblox Username - Unit Testing", + defaults={ + "provider": "environment-variable", + "parameters": {"variable": "NAUTOBOT_SSOT_INFOBLOX_USERNAME"}, + }, + ) + infoblox_password, _ = Secret.objects.get_or_create( + name="Infoblox Password - Unit Testing", + defaults={ + "provider": "environment-variable", + "parameters": {"variable": "NAUTOBOT_SSOT_INFOBLOX_PASSWORD"}, + }, + ) + SecretsGroupAssociation.objects.get_or_create( + secrets_group=secrets_group, + access_type=SecretsGroupAccessTypeChoices.TYPE_REST, + secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME, + defaults={ + "secret": infoblox_username, + }, + ) + SecretsGroupAssociation.objects.get_or_create( + secrets_group=secrets_group, + access_type=SecretsGroupAccessTypeChoices.TYPE_REST, + secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD, + defaults={ + "secret": infoblox_password, + }, + ) + external_integration, _ = ExternalIntegration.objects.get_or_create( + name="InfobloxUnitTestingInstance", + remote_url=infoblox_url, + secrets_group=secrets_group, + verify_ssl=True, + timeout=60, + ) + + config, _ = SSOTInfobloxConfig.objects.get_or_create( + name="InfobloxUnitTestConfig", + defaults=dict( # pylint: disable=use-dict-literal + description="Unit Test Config.", + default_status=default_status, + infoblox_wapi_version="v2.12", + infoblox_instance=external_integration, + enable_sync_to_infoblox=True, + import_ip_addresses=True, + import_subnets=True, + import_vlan_views=True, + import_vlans=True, + import_ipv4=True, + import_ipv6=True, + job_enabled=True, + infoblox_sync_filters=infoblox_sync_filters, + ), + ) + + return config + + def localhost_client_infoblox(localhost_url): """Return InfobloxAPI client for testing.""" return client.InfobloxApi( # nosec - url=localhost_url, username="test-user", 
password="test-password", verify_ssl=False, cookie=None + url=localhost_url, + username="test-user", + password="test-password", + verify_ssl=False, + wapi_version="v2.12", + timeout=60, + cookie=None, ) @@ -94,6 +170,11 @@ def get_a_record_by_name(): return _json_read_fixture("get_a_record_by_name.json") +def get_a_record_by_ref(): + """Return a get A record by ref response.""" + return _json_read_fixture("get_a_record_by_ref.json") + + def get_host_record_by_name(): """Return a get Host record by name response.""" return _json_read_fixture("get_host_record_by_name.json") @@ -124,16 +205,31 @@ def get_authoritative_zone(): return _json_read_fixture("get_authoritative_zone.json") +def get_authoritative_zones_for_dns_view(): + """Return a get authoritative zones for view response.""" + return _json_read_fixture("get_authoritative_zones_for_dns_view.json") + + def find_network_reference(): """Return a find network reference response.""" return _json_read_fixture("find_network_reference.json") +def get_ptr_record_by_ip(): + """Return a get PTR record by IP response.""" + return _json_read_fixture("get_ptr_record_by_ip.json") + + def get_ptr_record_by_name(): """Return a get PTR record by name response.""" return _json_read_fixture("get_ptr_record_by_name.json") +def get_ptr_record_by_ref(): + """Return a get PTR record by ref response.""" + return _json_read_fixture("get_ptr_record_by_ref.json") + + def find_next_available_ip(): """Return a next available IP response.""" return _json_read_fixture("find_next_available_ip.json") @@ -154,6 +250,16 @@ def get_network_containers_ipv6(): return _json_read_fixture("get_network_containers_ipv6.json") +def get_all_network_views(): + """Return a all_network_views response.""" + return _json_read_fixture("get_all_network_views.json") + + +def get_network_view(): + """Return a get_network_view response.""" + return _json_read_fixture("get_network_view.json") + + def get_all_ranges(): """Return a get all ranges response.""" return _json_read_fixture("get_all_ranges.json") diff --git a/nautobot_ssot/tests/infoblox/test_client.py b/nautobot_ssot/tests/infoblox/test_client.py index 82f652d8d..f636ec713 100644 --- a/nautobot_ssot/tests/infoblox/test_client.py +++ b/nautobot_ssot/tests/infoblox/test_client.py @@ -13,7 +13,9 @@ from nautobot_ssot.integrations.infoblox.utils.client import InvalidUrlScheme, get_dns_name from .fixtures_infoblox import ( + get_ptr_record_by_ip, get_ptr_record_by_name, + get_ptr_record_by_ref, localhost_client_infoblox, get_all_ipv4address_networks, get_all_ipv4address_networks_medium, @@ -25,6 +27,7 @@ get_host_by_ip, get_a_record_by_ip, get_a_record_by_name, + get_a_record_by_ref, get_host_record_by_name, get_all_dns_views, get_dhcp_lease_from_ipv4, @@ -32,8 +35,11 @@ get_all_ranges, get_all_subnets, get_authoritative_zone, + get_authoritative_zones_for_dns_view, get_network_containers, get_network_containers_ipv6, + get_all_network_views, + get_network_view, find_network_reference, find_next_available_ip, search_ipv4_address, @@ -275,7 +281,7 @@ def test_get_a_record_by_ip_success(self): req.get(f"{LOCALHOST}/{mock_uri}", json=mock_response, status_code=200) resp = self.infoblox_client.get_a_record_by_ip(mock_ip) - self.assertEqual(resp, mock_response["result"]) + self.assertEqual(resp, mock_response["result"][0]) def test_get_a_record_by_ip_fail(self): """Test get_a_record_by_ip fail.""" @@ -290,6 +296,33 @@ def test_get_a_record_by_ip_fail(self): self.assertEqual(context.exception.response.status_code, 404) + def 
test_get_a_record_by_ref_success(self):
+        """Test get_a_record_by_ref success."""
+        mock_ref = (
+            "record:a/ZG5zLmJpbmRfYSQuX2RlZmF1bHQudGVzdCx0ZXN0ZGV2aWNlMSwxMC4yMjAuMC4xMDE:testdevice1.test/default"
+        )
+        mock_response = get_a_record_by_ref()
+
+        with requests_mock.Mocker() as req:
+            req.get(f"{LOCALHOST}/{mock_ref}", json=mock_response, status_code=200)
+            resp = self.infoblox_client.get_a_record_by_ref(mock_ref)
+
+        self.assertEqual(resp, mock_response)
+
+    def test_get_a_record_by_ref_fail(self):
+        """Test get_a_record_by_ref fail."""
+        mock_ref = (
+            "record:a/aG5zLmJpbmRfYSQuX2RlZmF1bHQudGVzdCx0ZXN0ZGV2aWNlMSwxMC4yMjAuMC4xMDE:testdevice1.test/default"
+        )
+        mock_response = ""
+
+        with requests_mock.Mocker() as req:
+            req.get(f"{LOCALHOST}/{mock_ref}", json=mock_response, status_code=404)
+            with self.assertRaises(HTTPError) as context:
+                self.infoblox_client.get_a_record_by_ref(mock_ref)
+
+        self.assertEqual(context.exception.response.status_code, 404)
+
     def test_get_all_dns_views_success(self):
         """Test get_all_dns_views success."""
         mock_response = get_all_dns_views()
@@ -659,6 +692,58 @@ def test_get_ptr_record_by_name_fail(self):
 
         self.assertEqual(context.exception.response.status_code, 404)
 
+    def test_get_ptr_record_by_ip_success(self):
+        """Test get_ptr_record_by_ip success."""
+        mock_ip = "10.0.0.1"
+        mock_response = get_ptr_record_by_ip()
+        mock_uri = "record:ptr"
+
+        with requests_mock.Mocker() as req:
+            req.get(f"{LOCALHOST}/{mock_uri}", json=mock_response, status_code=200)
+            resp = self.infoblox_client.get_ptr_record_by_ip(mock_ip)
+
+        self.assertEqual(resp, mock_response["result"])
+
+    def test_get_ptr_record_by_ip_fail(self):
+        """Test get_ptr_record_by_ip fail."""
+        mock_ip = "10.0.0.2"
+        mock_response = ""
+        mock_uri = "record:ptr"
+
+        with requests_mock.Mocker() as req:
+            req.get(f"{LOCALHOST}/{mock_uri}", json=mock_response, status_code=404)
+            with self.assertRaises(HTTPError) as context:
+                self.infoblox_client.get_ptr_record_by_ip(mock_ip)
+
+        self.assertEqual(context.exception.response.status_code, 404)
+
+    def test_get_ptr_record_by_ref_success(self):
+        """Test get_ptr_record_by_ref success."""
+        mock_ref = (
+            "record:a/ZG5zLmJpbmRfYSQuX2RlZmF1bHQudGVzdCx0ZXN0ZGV2aWNlMSwxMC4yMjAuMC4xMDE:testdevice1.test/default"
+        )
+        mock_response = get_ptr_record_by_ref()
+
+        with requests_mock.Mocker() as req:
+            req.get(f"{LOCALHOST}/{mock_ref}", json=mock_response, status_code=200)
+            resp = self.infoblox_client.get_ptr_record_by_ref(mock_ref)
+
+        self.assertEqual(resp, mock_response)
+
+    def test_get_ptr_record_by_ref_fail(self):
+        """Test get_ptr_record_by_ref fail."""
+        mock_ref = (
+            "record:a/aG5zLmJpbmRfYSQuX2RlZmF1bHQudGVzdCx0ZXN0ZGV2aWNlMSwxMC4yMjAuMC4xMDE:testdevice1.test/default"
+        )
+        mock_response = ""
+
+        with requests_mock.Mocker() as req:
+            req.get(f"{LOCALHOST}/{mock_ref}", json=mock_response, status_code=404)
+            with self.assertRaises(HTTPError) as context:
+                self.infoblox_client.get_ptr_record_by_ref(mock_ref)
+
+        self.assertEqual(context.exception.response.status_code, 404)
+
     def test_search_ipv4_address_success(self):
         """Test search_ipv4_address success."""
         mock_ip = "10.223.0.42"
@@ -705,3 +790,90 @@ def test_get_network_containers_ipv6(self):
 
         resp = self.infoblox_client.get_network_containers(ipv6=True)
 
         self.assertEqual(resp, mock_response["result"])
+
+    def test_get_network_views_success(self):
+        """Test get_network_views."""
+        mock_response = get_all_network_views()
+        mock_uri = "networkview"
+
+        with requests_mock.Mocker() as req:
+            req.get(f"{LOCALHOST}/{mock_uri}", json=mock_response, status_code=200)
+            resp = self.infoblox_client.get_network_views()
+
+        self.assertEqual(resp, mock_response)
+
+    def test_get_network_view_success(self):
+        """Test get_network_view success."""
+        mock_name = "dev"
+        mock_response = get_network_view()
+        mock_uri = "networkview"
+
+        with requests_mock.Mocker() as req:
+            req.get(f"{LOCALHOST}/{mock_uri}", json=mock_response, status_code=200)
+            resp = self.infoblox_client.get_network_view(mock_name)
+
+        self.assertEqual(resp, mock_response)
+
+    def test_get_network_view_fail(self):
+        """Test get_network_view fail."""
+        mock_name = "dev"
+        mock_response = ""
+        mock_uri = "networkview"
+
+        with requests_mock.Mocker() as req:
+            req.get(f"{LOCALHOST}/{mock_uri}", json=mock_response, status_code=404)
+            resp = self.infoblox_client.get_network_view(mock_name)
+
+        self.assertEqual(resp, [])
+
+    def test_get_default_dns_view_for_network_view(self):
+        """Test get_default_dns_view_for_network_view success."""
+        mock_name = "dev"
+        mock_response = get_network_view()
+        mock_uri = "networkview"
+
+        with requests_mock.Mocker() as req:
+            req.get(f"{LOCALHOST}/{mock_uri}", json=mock_response, status_code=200)
+            resp = self.infoblox_client.get_default_dns_view_for_network_view(mock_name)
+
+        self.assertEqual(resp, "default.dev")
+
+    def test_get_dns_view_for_network_view_from_default(self):
+        """Test get_dns_view_for_network_view using default view."""
+        mock_name = "dev"
+        mock_response = get_network_view()
+        mock_uri = "networkview"
+
+        with requests_mock.Mocker() as req:
+            req.get(f"{LOCALHOST}/{mock_uri}", json=mock_response, status_code=200)
+            resp = self.infoblox_client.get_dns_view_for_network_view(mock_name)
+
+        self.assertEqual(resp, "default.dev")
+
+    def test_get_dns_view_for_network_view_from_config(self):
+        """Test get_dns_view_for_network_view using configured mapping."""
+        mock_name = "dev"
+        mock_network_view_to_dns_map = {"dev": "dev-view"}
+        mock_response = get_network_view()
+        mock_uri = "networkview"
+
+        with requests_mock.Mocker() as req:
+            req.get(f"{LOCALHOST}/{mock_uri}", json=mock_response, status_code=200)
+            with unittest.mock.patch.object(
+                self.infoblox_client, "network_view_to_dns_map", mock_network_view_to_dns_map
+            ):
+                resp = self.infoblox_client.get_dns_view_for_network_view(mock_name)
+
+        self.assertEqual(resp, "dev-view")
+
+    def test_get_authoritative_zones_for_dns_view(self):
+        """Test get_authoritative_zones_for_dns_view."""
+        mock_view = "dev"
+        mock_response = get_authoritative_zones_for_dns_view()
+        mock_uri = "zone_auth"
+
+        with requests_mock.Mocker() as req:
+            req.get(f"{LOCALHOST}/{mock_uri}", json=mock_response, status_code=200)
+            resp = self.infoblox_client.get_authoritative_zones_for_dns_view(mock_view)
+
+        self.assertEqual(resp, mock_response["result"])
diff --git a/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py b/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py
index a2a8112ef..665df89f8 100644
--- a/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py
+++ b/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py
@@ -4,14 +4,16 @@
 
 from nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox import (
     InfobloxAdapter,
-    PLUGIN_CFG,
 )
 
+from .fixtures_infoblox import create_default_infoblox_config
+
 
 class TestInfobloxAdapter(unittest.TestCase):
     """Test cases for InfobloxAdapter."""
 
     def setUp(self):
+        config = create_default_infoblox_config()
         with unittest.mock.patch(
             "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", 
autospec=True ) as mock_client: @@ -19,9 +21,9 @@ def setUp(self): job=unittest.mock.Mock(), sync=unittest.mock.Mock(), conn=mock_client, + config=config, ) - @unittest.mock.patch.dict(PLUGIN_CFG, [("infoblox_import_subnets", [])]) @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox.get_default_ext_attrs", autospec=True, @@ -81,24 +83,24 @@ def test_load_prefixes_no_infoblox_import_subnets( "ranges": [], }, ] - self.infoblox_adapter.load_prefixes() + sync_filters = [{"network_view": "default"}] + self.infoblox_adapter.load_prefixes(include_ipv4=True, include_ipv6=False, sync_filters=sync_filters) self.infoblox_adapter.conn.get_tree_from_container.assert_not_called() mock_default_extra_attrs.assert_called_once() self.assertEqual(mock_extra_attr_dict.call_count, 4) mock_build_vlan_map.assert_called_once() self.assertEqual(len(self.infoblox_adapter.get_all("prefix")), 4) - self.infoblox_adapter.conn.get_network_containers.assert_has_calls([unittest.mock.call()]) - self.infoblox_adapter.conn.get_all_subnets.assert_has_calls([unittest.mock.call()]) - subnet_with_attrs = self.infoblox_adapter.get("prefix", "10.0.0.0/23") + self.infoblox_adapter.conn.get_network_containers.assert_has_calls([unittest.mock.call(network_view="default")]) + self.infoblox_adapter.conn.get_all_subnets.assert_has_calls([unittest.mock.call(network_view="default")]) + subnet_with_attrs = self.infoblox_adapter.get("prefix", "10.0.0.0/23__Global") self.assertEqual(subnet_with_attrs.ext_attrs, {"attr1": "data", "attr2": "value"}) self.assertEqual(subnet_with_attrs.vlans, {10: {"vid": 10, "name": "ten", "group": "group_a"}}) self.assertEqual(subnet_with_attrs.ranges, ["10.0.0.150-10.0.0.254", "10.0.1.150-10.0.1.254"]) - subnet_without_attrs = self.infoblox_adapter.get("prefix", "10.0.100.0/24") + subnet_without_attrs = self.infoblox_adapter.get("prefix", "10.0.100.0/24__Global") self.assertEqual(subnet_without_attrs.ext_attrs, {"attr1": "data"}) self.assertEqual(subnet_without_attrs.vlans, {}) self.assertEqual(subnet_without_attrs.ranges, []) - @unittest.mock.patch.dict(PLUGIN_CFG, [("infoblox_import_subnets", ["10.0.0.0/8", "192.168.0.0/16"])]) @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox.get_default_ext_attrs", autospec=True, @@ -164,24 +166,29 @@ def test_load_prefixes_with_infoblox_import_subnets( ] self.infoblox_adapter.conn.get_all_subnets.side_effect = [one_nine_two_network] self.infoblox_adapter.conn.remove_duplicates.side_effect = [ten_network + one_nine_two_network, ten_container] - self.infoblox_adapter.load_prefixes() + sync_filters = [{"network_view": "default", "prefixes_ipv4": ["10.0.0.0/8", "192.168.0.0/16"]}] + self.infoblox_adapter.load_prefixes(include_ipv4=True, include_ipv6=False, sync_filters=sync_filters) self.infoblox_adapter.conn.get_tree_from_container.assert_has_calls( - [unittest.mock.call("10.0.0.0/8"), unittest.mock.call("192.168.0.0/16")] + [ + unittest.mock.call(root_container="10.0.0.0/8", network_view="default"), + unittest.mock.call(root_container="192.168.0.0/16", network_view="default"), + ] ) self.assertEqual(self.infoblox_adapter.conn.get_tree_from_container.call_count, 2) self.infoblox_adapter.conn.get_child_subnets_from_container.assert_has_calls( - [unittest.mock.call(prefix="10.0.0.0/8"), unittest.mock.call(prefix="10.0.0.0/16")] + [ + unittest.mock.call(prefix="10.0.0.0/8", network_view="default"), + unittest.mock.call(prefix="10.0.0.0/16", network_view="default"), + ] ) 
        self.assertEqual(self.infoblox_adapter.conn.get_child_subnets_from_container.call_count, 2)
         self.infoblox_adapter.conn.get_all_subnets.assert_called_once()
-        self.infoblox_adapter.conn.get_all_subnets.assert_called_with("192.168.0.0/16")
-        self.assertEqual(self.infoblox_adapter.conn.remove_duplicates.call_count, 2)
+        self.infoblox_adapter.conn.get_all_subnets.assert_called_with("192.168.0.0/16", network_view="default")
         mock_default_extra_attrs.assert_called_once()
         self.assertEqual(mock_extra_attr_dict.call_count, 4)
         mock_build_vlan_map.assert_not_called()
         self.assertEqual(len(self.infoblox_adapter.get_all("prefix")), 4)
 
-    @unittest.mock.patch.dict(PLUGIN_CFG, [("infoblox_import_subnets", [])])
     @unittest.mock.patch(
         "nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox.get_default_ext_attrs",
         autospec=True,
@@ -223,21 +230,18 @@ def test_load_prefixes_add_duplicate_prefix(
                 "ranges": [],
             },
         ]
-        error_message = (
-            "Duplicate prefix found: 10.0.0.0/23. Duplicate prefixes are not supported, "
-            "and only the first occurrence will be included in the sync. To load data "
-            "from a single Network View, use the 'infoblox_network_view' setting."
-        )
-        self.infoblox_adapter.load_prefixes()
+        error_message = "Duplicate prefix found: 10.0.0.0/23__Global."
+        sync_filters = [{"network_view": "default"}]
+        self.infoblox_adapter.load_prefixes(include_ipv4=True, include_ipv6=False, sync_filters=sync_filters)
         self.infoblox_adapter.job.logger.warning.assert_called_once()
         self.infoblox_adapter.job.logger.warning.assert_called_with(error_message)
         mock_build_vlan_map.assert_not_called()
         self.assertEqual(mock_extra_attr_dict.call_count, 2)
         mock_default_extra_attrs.assert_called_once()
 
-    @unittest.mock.patch.dict(
-        PLUGIN_CFG, [("infoblox_import_subnets", []), ("infoblox_import_objects_subnets_ipv6", True)]
-    )
     @unittest.mock.patch(
         "nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox.get_default_ext_attrs",
         autospec=True,
@@ -322,19 +326,20 @@ def test_load_prefixes_ipv6_subnets(
                 },
             ],
         ]
-        self.infoblox_adapter.load_prefixes()
+        sync_filters = [{"network_view": "default"}]
+        self.infoblox_adapter.load_prefixes(include_ipv4=True, include_ipv6=True, sync_filters=sync_filters)
         self.infoblox_adapter.conn.get_tree_from_container.assert_not_called()
         mock_default_extra_attrs.assert_called_once()
         self.assertEqual(mock_extra_attr_dict.call_count, 6)
         mock_build_vlan_map.assert_called_once()
         self.assertEqual(len(self.infoblox_adapter.get_all("prefix")), 6)
         self.infoblox_adapter.conn.get_network_containers.assert_has_calls(
-            [unittest.mock.call(), unittest.mock.call(ipv6=True)]
+            [unittest.mock.call(network_view="default"), unittest.mock.call(network_view="default", ipv6=True)]
         )
         self.infoblox_adapter.conn.get_all_subnets.assert_has_calls(
-            [unittest.mock.call(), unittest.mock.call(ipv6=True)]
+            [unittest.mock.call(network_view="default"), unittest.mock.call(network_view="default", ipv6=True)]
        )
-        ipv6_subnet = self.infoblox_adapter.get("prefix", "2001:5b0:4100::/40")
+        ipv6_subnet = self.infoblox_adapter.get("prefix", "2001:5b0:4100::/40__Global")
         self.assertEqual(ipv6_subnet.ext_attrs, {"attr1": "data"})
         self.assertEqual(ipv6_subnet.vlans, {})
         self.assertEqual(ipv6_subnet.ranges, [])
diff --git a/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py b/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py
index 023a5fe5d..218b550f6 100644
--- 
a/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py +++ b/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py @@ -7,7 +7,7 @@ from nautobot.ipam.models import Prefix, VLAN, VLANGroup from nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot import NautobotAdapter -from nautobot_ssot.tests.infoblox.fixtures_infoblox import create_prefix_relationship +from nautobot_ssot.tests.infoblox.fixtures_infoblox import create_default_infoblox_config, create_prefix_relationship class TestNautobotAdapter(TestCase): @@ -66,7 +66,9 @@ def setUp(self): status=active_status, type="Network", ) - self.nb_adapter = NautobotAdapter() + self.config = create_default_infoblox_config() + self.sync_filters = self.config.infoblox_sync_filters + self.nb_adapter = NautobotAdapter(config=self.config) def test_load_vlans_loads_expected_vlans(self): self.nb_adapter.load_vlans() @@ -80,16 +82,16 @@ def test_load_vlans_does_not_load_ungrouped_vlans(self): self.assertFalse(10 in actual_vlan_ids) def test_load_prefixes_loads_prefixes(self): - self.nb_adapter.load_prefixes() + self.nb_adapter.load_prefixes(include_ipv4=True, include_ipv6=False, sync_filters=self.sync_filters) actual_prefixes = {prefix.network for prefix in self.nb_adapter.get_all("prefix")} self.assertEqual(actual_prefixes, {"10.0.0.0/24", "10.0.1.0/24"}) def test_load_prefixes_loads_prefixes_and_vlan_relationship(self): - self.nb_adapter.load_prefixes() - prefix_with_vlan = self.nb_adapter.get("prefix", {"network": "10.0.0.0/24"}) + self.nb_adapter.load_prefixes(include_ipv4=True, include_ipv6=False, sync_filters=self.sync_filters) + prefix_with_vlan = self.nb_adapter.get("prefix", {"network": "10.0.0.0/24", "namespace": "Global"}) self.assertEqual({10: {"vid": 10, "name": "ten", "group": None}}, prefix_with_vlan.vlans) def test_load_prefixes_loads_ranges(self): - self.nb_adapter.load_prefixes() - prefix_with_ranges = self.nb_adapter.get("prefix", {"network": "10.0.0.0/24"}) + self.nb_adapter.load_prefixes(include_ipv4=True, include_ipv6=False, sync_filters=self.sync_filters) + prefix_with_ranges = self.nb_adapter.get("prefix", {"network": "10.0.0.0/24", "namespace": "Global"}) self.assertEqual(["10.0.0.50-10.0.0.254"], prefix_with_ranges.ranges) diff --git a/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py b/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py index 3df84923d..c935399b6 100644 --- a/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py +++ b/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py @@ -13,6 +13,8 @@ from nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot import NautobotAdapter from nautobot_ssot.tests.infoblox.fixtures_infoblox import create_prefix_relationship +from .fixtures_infoblox import create_default_infoblox_config + class TestTagging(TestCase): """Tests ensuring tagging is applied to objects synced from and to Infoblox.""" @@ -39,6 +41,7 @@ def setUp(self): ) for model in [IPAddress, Prefix, VLAN]: self.tag_sync_to_infoblox.content_types.add(ContentType.objects.get_for_model(model)) + self.config = create_default_infoblox_config() def test_tags_have_correct_content_types_set(self): """Ensure tags have correct content types configured.""" @@ -49,18 +52,25 @@ def test_tags_have_correct_content_types_set(self): def test_objects_synced_from_infoblox_are_tagged(self): """Ensure objects synced from Infoblox have 'SSoT Synced from Infoblox' tag applied.""" - nb_diffsync = NautobotAdapter() + nb_diffsync = NautobotAdapter(config=self.config) nb_diffsync.job = Mock() nb_diffsync.load() - infoblox_adapter = 
InfobloxAdapter(conn=Mock()) + infoblox_adapter = InfobloxAdapter(conn=Mock(), config=self.config) + ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(ds_namespace) ds_prefix = infoblox_adapter.prefix( network="10.0.0.0/8", description="Test Network", network_type="network", ext_attrs={}, vlans={}, + status="Active", + namespace="Global", ) infoblox_adapter.add(ds_prefix) ds_ipaddress = infoblox_adapter.ipaddress( @@ -72,6 +82,7 @@ def test_objects_synced_from_infoblox_are_tagged(self): prefix_length=8, ip_addr_type="host", ext_attrs={}, + namespace="Global", ) infoblox_adapter.add(ds_ipaddress) ds_vlangroup = infoblox_adapter.vlangroup(name="TestVLANGroup", description="", ext_attrs={}) @@ -128,12 +139,17 @@ def test_objects_synced_to_infoblox_are_tagged(self): ) nb_vlan.validated_save() - nautobot_adapter = NautobotAdapter() + nautobot_adapter = NautobotAdapter(config=self.config) nautobot_adapter.job = Mock() nautobot_adapter.load() - infoblox_adapter = InfobloxAdapter(conn=Mock()) + infoblox_adapter = InfobloxAdapter(conn=Mock(), config=self.config) infoblox_adapter.job = Mock() + ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(ds_namespace) nautobot_adapter.sync_to(infoblox_adapter) prefix = Prefix.objects.get(network="10.0.0.0", prefix_length="8") @@ -162,6 +178,7 @@ def setUp(self): for model in [IPAddress, Prefix, VLAN, VLANGroup]: self.cf_synced_to_infoblox.content_types.add(ContentType.objects.get_for_model(model)) create_prefix_relationship() + self.config = create_default_infoblox_config() def test_cfs_have_correct_content_types_set(self): """Ensure cfs have correct content types configured.""" @@ -201,13 +218,20 @@ def test_cf_updated_for_objects_synced_to_infoblox(self): ) nb_vlan.validated_save() - nautobot_adapter = NautobotAdapter() + nautobot_adapter = NautobotAdapter(config=self.config) nautobot_adapter.job = Mock() nautobot_adapter.load() conn = Mock() - infoblox_adapter = InfobloxAdapter(conn=conn) + infoblox_adapter = InfobloxAdapter(conn=conn, config=self.config) infoblox_adapter.job = Mock() + + ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(ds_namespace) + nautobot_adapter.sync_to(infoblox_adapter) prefix = Prefix.objects.get(network="10.0.0.0", prefix_length="8") From b7cd3357dd822a598db824cb3da2fadeb0a5a8d8 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Fri, 17 May 2024 20:42:41 +0100 Subject: [PATCH 046/229] Update job to take SSOTInfobloxConfig objects. 
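
The jobs now build their Infoblox client from the selected SSOTInfobloxConfig
instead of PLUGIN_CFG. A minimal sketch of the intended flow, based on the
helper added below (the config values shown are hypothetical):

    # `config` is the SSOTInfobloxConfig instance chosen in the job form.
    client_config = _get_infoblox_client_config(config, debug=True)
    # Expected shape, per the helper below; username/password are resolved
    # from the config's Secrets Group (access type REST):
    # {"url": "https://infoblox.example.com", "username": "...", "password": "...",
    #  "verify_ssl": True, "wapi_version": "v2.12", "timeout": 60, "debug": True,
    #  "network_view_to_dns_map": {}}
    client = InfobloxApi(**client_config)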
--- nautobot_ssot/integrations/infoblox/jobs.py | 73 ++++++++++++++++++---
 1 file changed, 63 insertions(+), 10 deletions(-)

diff --git a/nautobot_ssot/integrations/infoblox/jobs.py b/nautobot_ssot/integrations/infoblox/jobs.py
index 89ae8b2ab..4c011a8bb 100644
--- a/nautobot_ssot/integrations/infoblox/jobs.py
+++ b/nautobot_ssot/integrations/infoblox/jobs.py
@@ -3,21 +3,55 @@
 from diffsync.enum import DiffSyncFlags
 from django.templatetags.static import static
 from django.urls import reverse
+from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices
 from nautobot.extras.jobs import BooleanVar
+from nautobot.apps.jobs import ObjectVar
 
 from nautobot_ssot.jobs.base import DataMapping, DataSource, DataTarget
+from nautobot_ssot.models import SSOTInfobloxConfig
 
 from .diffsync.adapters import infoblox, nautobot
 from .utils.client import InfobloxApi
-from .constant import PLUGIN_CFG
 
 name = "SSoT - Infoblox DDI"  # pylint: disable=invalid-name
 
 
+def _get_infoblox_client_config(app_config, debug):
+    """Get Infoblox client config from the Infoblox config instance."""
+    username = app_config.infoblox_instance.secrets_group.get_secret_value(
+        access_type=SecretsGroupAccessTypeChoices.TYPE_REST,
+        secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME,
+    )
+    password = app_config.infoblox_instance.secrets_group.get_secret_value(
+        access_type=SecretsGroupAccessTypeChoices.TYPE_REST,
+        secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD,
+    )
+    infoblox_client_config = {
+        "url": app_config.infoblox_instance.remote_url,
+        "username": username,
+        "password": password,
+        "verify_ssl": app_config.infoblox_instance.verify_ssl,
+        "wapi_version": app_config.infoblox_wapi_version,
+        "timeout": app_config.infoblox_instance.timeout,
+        "debug": debug,
+        "network_view_to_dns_map": app_config.infoblox_dns_view_mapping,
+    }
+
+    return infoblox_client_config
+
+
 class InfobloxDataSource(DataSource):
     """Infoblox SSoT Data Source."""
 
     debug = BooleanVar(description="Enable for verbose debug logging.")
+    config = ObjectVar(
+        model=SSOTInfobloxConfig,
+        display_field="SSOT Infoblox config",
+        required=True,
+        query_params={
+            "job_enabled": True,
+        },
+    )
 
     def __init__(self):
         """Initialize InfobloxDataSource."""
@@ -46,15 +80,17 @@ def data_mappings(cls):
     def load_source_adapter(self):
         """Load Infoblox data."""
         self.logger.info("Connecting to Infoblox")
-        client = InfobloxApi()
-        self.source_adapter = infoblox.InfobloxAdapter(job=self, sync=self.sync, conn=client)
+        client_config = _get_infoblox_client_config(self.config, self.debug)
+        client = InfobloxApi(**client_config)
+        self.source_adapter = infoblox.InfobloxAdapter(job=self, sync=self.sync, conn=client, config=self.config)
         self.logger.info("Loading data from Infoblox...")
         self.source_adapter.load()
 
     def load_target_adapter(self):
         """Load Nautobot data."""
         self.logger.info("Connecting to Nautobot...")
-        self.target_adapter = nautobot.NautobotAdapter(job=self, sync=self.sync)
+        self.target_adapter = nautobot.NautobotAdapter(job=self, sync=self.sync, config=self.config)
         self.logger.info("Loading data from Nautobot...")
         self.target_adapter.load()
 
@@ -62,6 +98,7 @@ def run(self, dryrun, memory_profiling, debug, *args, **kwargs):  # pylint: disa
         """Perform data synchronization."""
         self.debug = debug
         self.dryrun = dryrun
+        self.config = kwargs.get("config")
         self.memory_profiling = memory_profiling
         super().run(dryrun=self.dryrun, memory_profiling=self.memory_profiling, *args, **kwargs)
 
@@ -70,6 +107,15 @@ class InfobloxDataTarget(DataTarget):
     """Infoblox SSoT Data Target."""
 
     debug = BooleanVar(description="Enable for verbose debug logging.")
+    config = ObjectVar(
+        model=SSOTInfobloxConfig,
+        display_field="SSOT Infoblox config",
+        required=True,
+        query_params={
+            "enable_sync_to_infoblox": True,
+            "job_enabled": True,
+        },
+    )
 
     def __init__(self):
         """Initialize InfobloxDataTarget."""
@@ -88,6 +134,7 @@ class Meta:  # pylint: disable=too-few-public-methods
     def data_mappings(cls):
         """Show mapping of models between Nautobot and Infoblox."""
         return (
+            DataMapping("Namespace", reverse("ipam:namespace_list"), "network_view", None),
             DataMapping("Prefix", reverse("ipam:prefix_list"), "network", None),
             DataMapping("IP Address", reverse("ipam:ipaddress_list"), "ipaddress", None),
             DataMapping("VLAN", reverse("ipam:vlan_list"), "vlan", None),
@@ -97,15 +144,16 @@ def data_mappings(cls):
     def load_source_adapter(self):
         """Load Nautobot data."""
         self.logger.info("Connecting to Nautobot...")
-        self.source_adapter = nautobot.NautobotAdapter(job=self, sync=self.sync)
+        self.source_adapter = nautobot.NautobotAdapter(job=self, sync=self.sync, config=self.config)
         self.logger.info("Loading data from Nautobot...")
         self.source_adapter.load()
 
     def load_target_adapter(self):
         """Load Infoblox data."""
         self.logger.info("Connecting to Infoblox")
-        client = InfobloxApi()
-        self.target_adapter = infoblox.InfobloxAdapter(job=self, sync=self.sync, conn=client)
+        client_config = _get_infoblox_client_config(self.config, self.debug)
+        client = InfobloxApi(**client_config)
+        self.target_adapter = infoblox.InfobloxAdapter(job=self, sync=self.sync, conn=client, config=self.config)
         self.logger.info("Loading data from Infoblox...")
         self.target_adapter.load()
 
@@ -113,11 +161,16 @@ def run(self, dryrun, memory_profiling, debug, *args, **kwargs):  # pylint: disa
         """Perform data synchronization."""
         self.debug = debug
         self.dryrun = dryrun
+        self.config = kwargs.get("config")
+        # Additional guard against launching sync to Infoblox with config that doesn't allow it
+        if not self.config.enable_sync_to_infoblox:
+            self.logger.error("Can't run sync to Infoblox, provided config doesn't have it enabled...")
+            return
         self.memory_profiling = memory_profiling
         super().run(dryrun=self.dryrun, memory_profiling=self.memory_profiling, *args, **kwargs)
 
 
-jobs = [InfobloxDataSource]
+jobs = [InfobloxDataSource, InfobloxDataTarget]
 
-if PLUGIN_CFG["enable_sync_to_infoblox"]:
-    jobs.append(InfobloxDataTarget)

From 0b60d060242ab0c6f2f90769ebeed058b2c154c3 Mon Sep 17 00:00:00 2001
From: Przemek Rogala
Date: Fri, 17 May 2024 20:43:35 +0100
Subject: [PATCH 047/229] Migrate existing settings to SSOTInfobloxConfig
 object.
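
The migration below derives `infoblox_sync_filters` from the legacy
`infoblox_import_subnets` and `infoblox_network_view` settings via
`_get_sync_filters()`. A sketch of the expected mapping, using hypothetical
legacy values:

    # PLUGINS_CONFIG["nautobot_ssot"] (legacy):
    #   "infoblox_import_subnets": ["10.0.0.0/8", "2001:db8::/32"]
    #   "infoblox_network_view": "dev"
    # _get_sync_filters() is then expected to return:
    [{"network_view": "dev", "prefixes_ipv4": ["10.0.0.0/8"], "prefixes_ipv6": ["2001:db8::/32"]}]
    # With no subnets configured it falls back to the default filter:
    [{"network_view": "default"}]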
--- .../integrations/infoblox/signals.py | 130 +++++++++++++++++- 1 file changed, 129 insertions(+), 1 deletion(-) diff --git a/nautobot_ssot/integrations/infoblox/signals.py b/nautobot_ssot/integrations/infoblox/signals.py index 57667afdb..9e089a280 100644 --- a/nautobot_ssot/integrations/infoblox/signals.py +++ b/nautobot_ssot/integrations/infoblox/signals.py @@ -2,11 +2,22 @@ # pylint: disable=duplicate-code +import ipaddress + from nautobot.core.signals import nautobot_database_ready -from nautobot.extras.choices import CustomFieldTypeChoices, RelationshipTypeChoices +from nautobot.extras.choices import ( + CustomFieldTypeChoices, + RelationshipTypeChoices, + SecretsGroupAccessTypeChoices, + SecretsGroupSecretTypeChoices, +) +from django.conf import settings from nautobot_ssot.integrations.infoblox.constant import TAG_COLOR +config = settings.PLUGINS_CONFIG["nautobot_ssot"] + + def register_signals(sender): """Register signals for Infoblox integration.""" nautobot_database_ready.connect(nautobot_database_ready_callback, sender=sender) @@ -24,8 +35,14 @@ def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disa IPAddress = apps.get_model("ipam", "IPAddress") Tag = apps.get_model("extras", "Tag") Relationship = apps.get_model("extras", "Relationship") + ExternalIntegration = apps.get_model("extras", "ExternalIntegration") + Secret = apps.get_model("extras", "Secret") + SecretsGroup = apps.get_model("extras", "SecretsGroup") + SecretsGroupAssociation = apps.get_model("extras", "SecretsGroupAssociation") + Status = apps.get_model("extras", "Status") VLAN = apps.get_model("ipam", "VLAN") VLANGroup = apps.get_model("ipam", "VLANGroup") + SSOTInfobloxConfig = apps.get_model("nautobot_ssot", "SSOTInfobloxConfig") tag_sync_from_infoblox, _ = Tag.objects.get_or_create( name="SSoT Synced from Infoblox", @@ -76,3 +93,114 @@ def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disa "destination_label": "VLAN", } Relationship.objects.get_or_create(label=relationship_dict["label"], defaults=relationship_dict) + + # Migrate existing configuration to a configuration object + if not SSOTInfobloxConfig.objects.exists(): + default_status_name = str(config.get("infoblox_default_status", "")) + found_status = Status.objects.filter(name=default_status_name) + if found_status.exists(): + default_status = found_status.first() + else: + default_status = Status.objects.get(name="Active") + + try: + infoblox_request_timeout = int(config.get("infoblox_request_timeout", 60)) + except ValueError: + infoblox_request_timeout = 60 + + infoblox_sync_filters = _get_sync_filters() + + secrets_group, _ = SecretsGroup.objects.get_or_create(name="InfobloxSSOTMigration") + infoblox_username, _ = Secret.objects.get_or_create( + name="Infoblox Username - SSOT Migration", + defaults={ + "provider": "environment-variable", + "parameters": {"variable": "NAUTOBOT_SSOT_INFOBLOX_USERNAME"}, + }, + ) + infoblox_password, _ = Secret.objects.get_or_create( + name="Infoblox Password - SSOT Migration", + defaults={ + "provider": "environment-variable", + "parameters": {"variable": "NAUTOBOT_SSOT_INFOBLOX_PASSWORD"}, + }, + ) + SecretsGroupAssociation.objects.get_or_create( + secrets_group=secrets_group, + access_type=SecretsGroupAccessTypeChoices.TYPE_REST, + secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME, + defaults={ + "secret": infoblox_username, + }, + ) + SecretsGroupAssociation.objects.get_or_create( + secrets_group=secrets_group, + 
access_type=SecretsGroupAccessTypeChoices.TYPE_REST, + secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD, + defaults={ + "secret": infoblox_password, + }, + ) + external_integration = ExternalIntegration.objects.create( + name="MigratedInfobloxInstance", + remote_url=str(config.get("infoblox_url", "https://replace.me.local")), + secrets_group=secrets_group, + verify_ssl=bool(config.get("infoblox_verify_ssl", True)), + timeout=infoblox_request_timeout, + ) + + SSOTInfobloxConfig.objects.create( + name="InfobloxConfigDefault", + description="Config generated from the migrated legacy settings.", + default_status=default_status, + infoblox_wapi_version=str(config.get("infoblox_wapi_version", "v2.12")), + infoblox_instance=external_integration, + enable_sync_to_infoblox=bool(config.get("infoblox_enable_sync_to_infoblox", False)), + import_ip_addresses=bool(config.get("infoblox_import_objects_ip_addresses", False)), + import_subnets=bool(config.get("infoblox_import_objects_subnets", False)), + import_vlan_views=bool(config.get("infoblox_import_objects_vlan_views", False)), + import_vlans=bool(config.get("infoblox_import_objects_vlans", False)), + import_ipv4=True, + import_ipv6=bool(config.get("infoblox_import_objects_subnets_ipv6", False)), + job_enabled=True, + infoblox_sync_filters=infoblox_sync_filters, + ) + + +def _get_sync_filters(): + """Build sync filters from the existing config.""" + subnets_to_import = config.get("infoblox_import_subnets", []) + default_sync_filters = [{"network_view": "default"}] + ipv4_subnets = [] + ipv6_subnets = [] + if not subnets_to_import: + return default_sync_filters + if not isinstance(subnets_to_import, list): + return default_sync_filters + for subnet in subnets_to_import: + try: + ipaddress.IPv4Network(subnet) + ipv4_subnets.append(subnet) + except (ValueError, TypeError): + pass + try: + ipaddress.IPv6Network(subnet) + ipv6_subnets.append(subnet) + except (ValueError, TypeError): + pass + + sync_filter = {} + if ipv4_subnets: + sync_filter["prefixes_ipv4"] = ipv4_subnets + if ipv6_subnets: + sync_filter["prefixes_ipv6"] = ipv6_subnets + + network_view = str(config.get("infoblox_network_view", "")) + if network_view: + sync_filter["network_view"] = network_view + else: + sync_filter["network_view"] = "default" + + sync_filters = [sync_filter] + + return sync_filters From 44fd449226a4503092808b061fd748d5e82b87d0 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Fri, 17 May 2024 20:48:16 +0100 Subject: [PATCH 048/229] Add code for SSOTInfobloxConfig model. 
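
The new model validates its JSON settings in `clean()`. A sketch of values
expected to pass and fail `_clean_infoblox_sync_filters()` (the `config`
instance here is hypothetical):

    config.infoblox_sync_filters = [{"network_view": "default", "prefixes_ipv4": ["10.0.0.0/8"]}]
    config.full_clean()  # passes: known keys, unique network_view, prefix length present

    config.infoblox_sync_filters = [{"prefixes_ipv4": ["10.0.0.0"]}]
    config.full_clean()  # raises ValidationError: missing network_view, prefix has no length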
--- .../integrations/infoblox/filters.py | 28 ++ nautobot_ssot/integrations/infoblox/forms.py | 43 +++ nautobot_ssot/integrations/infoblox/models.py | 291 ++++++++++++++++++ nautobot_ssot/integrations/infoblox/tables.py | 39 +++ nautobot_ssot/integrations/infoblox/urls.py | 26 ++ nautobot_ssot/integrations/infoblox/views.py | 57 ++++ 6 files changed, 484 insertions(+) create mode 100644 nautobot_ssot/integrations/infoblox/filters.py create mode 100644 nautobot_ssot/integrations/infoblox/forms.py create mode 100644 nautobot_ssot/integrations/infoblox/models.py create mode 100644 nautobot_ssot/integrations/infoblox/tables.py create mode 100644 nautobot_ssot/integrations/infoblox/urls.py create mode 100644 nautobot_ssot/integrations/infoblox/views.py diff --git a/nautobot_ssot/integrations/infoblox/filters.py b/nautobot_ssot/integrations/infoblox/filters.py new file mode 100644 index 000000000..122b4a8b9 --- /dev/null +++ b/nautobot_ssot/integrations/infoblox/filters.py @@ -0,0 +1,28 @@ +"""Filtering implementation for SSOT Infoblox.""" + +import django_filters + +from django.db.models import Q +from nautobot.apps.filters import NautobotFilterSet + + +from .models import SSOTInfobloxConfig + + +class SSOTInfobloxConfigFilterSet(NautobotFilterSet): + """FilterSet for SSOTInfobloxConfig model.""" + + q = django_filters.CharFilter(method="search", label="Search") + + class Meta: + """Meta attributes for filter.""" + + model = SSOTInfobloxConfig + + fields = "__all__" + + def search(self, queryset, _name, value): + """String search of SSOTInfobloxConfig records.""" + if not value.strip(): + return queryset + return queryset.filter(Q(name__icontains=value)) # pylint: disable=unsupported-binary-operation diff --git a/nautobot_ssot/integrations/infoblox/forms.py b/nautobot_ssot/integrations/infoblox/forms.py new file mode 100644 index 000000000..e87510815 --- /dev/null +++ b/nautobot_ssot/integrations/infoblox/forms.py @@ -0,0 +1,43 @@ +"""Forms implementation for SSOT Infoblox.""" + +from nautobot.extras.forms import NautobotModelForm, NautobotFilterForm +from nautobot.apps.forms import JSONField + +from .models import SSOTInfobloxConfig + + +class SSOTInfobloxConfigForm(NautobotModelForm): # pylint: disable=too-many-ancestors + """SSOTInfobloxConfig creation/edit form.""" + + infoblox_sync_filters = JSONField( + required=True, label="Infoblox Sync Filters", help_text="Filters controlling data loaded from both systems." + ) + infoblox_dns_view_mapping = JSONField( + required=False, + label="Infoblox Network View to DNS Mapping", + help_text="Maps Network View to a single DNS View. This DNS View is used when creating DNS records.", + ) + cf_fields_ignore = JSONField( + required=False, + label="Extensible Attributes/Custom Fields to Ignore", + help_text="Provide list of Extensible Attributes and Custom Fields to ignore during sync." 
+ " Assign lists to keys `extensible_attributes` and `custom_fields`.", + ) + + class Meta: + """Meta attributes for the SSOTInfobloxConfigForm class.""" + + model = SSOTInfobloxConfig + fields = "__all__" + + +class SSOTInfobloxConfigFilterForm(NautobotFilterForm): + """Filter form for SSOTInfobloxConfig filter searches.""" + + model = SSOTInfobloxConfig + + class Meta: + """Meta attributes for the SSOTInfobloxConfigFilterForm class.""" + + model = SSOTInfobloxConfig + fields = "__all__" diff --git a/nautobot_ssot/integrations/infoblox/models.py b/nautobot_ssot/integrations/infoblox/models.py new file mode 100644 index 000000000..beae69257 --- /dev/null +++ b/nautobot_ssot/integrations/infoblox/models.py @@ -0,0 +1,291 @@ +"""Models implementation for SSOT Infoblox.""" + +import ipaddress + +from django.core.exceptions import ValidationError + +from django.core.serializers.json import DjangoJSONEncoder +from django.db import models + +from nautobot.core.models.generics import PrimaryModel +from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices +from nautobot.extras.models import SecretsGroupAssociation + + +def _get_default_sync_filters(): + """Provides default value for infoblox_sync_filters field.""" + return [{"network_view": "default"}] + + +class SSOTInfobloxConfig(PrimaryModel): # pylint: disable=too-many-ancestors + """SSOT Infoblox Configuration model.""" + + name = models.CharField(max_length=255, unique=True) + description = models.CharField( + max_length=255, + blank=True, + ) + default_status = models.ForeignKey( + to="extras.Status", + on_delete=models.PROTECT, + verbose_name="Default Object Status", + help_text="Status", + ) + infoblox_instance = models.ForeignKey( + to="extras.ExternalIntegration", + on_delete=models.PROTECT, + verbose_name="Infoblox Instance Config", + help_text="Infoblox Instance", + ) + infoblox_wapi_version = models.CharField( + max_length=255, + default="v2.12", + verbose_name="Infoblox WAPI version", + ) + enable_sync_to_infoblox = models.BooleanField( + default=False, verbose_name="Sync to Infoblox", help_text="Enable syncing of data from Nautobot to Infoblox." 
+    )
+    import_ip_addresses = models.BooleanField(
+        default=False,
+        verbose_name="Import IP Addresses",
+    )
+    import_subnets = models.BooleanField(
+        default=False,
+        verbose_name="Import Networks",
+    )
+    import_vlan_views = models.BooleanField(
+        default=False,
+        verbose_name="Import VLAN Views",
+    )
+    import_vlans = models.BooleanField(
+        default=False,
+        verbose_name="Import VLANs",
+    )
+    infoblox_sync_filters = models.JSONField(default=_get_default_sync_filters, encoder=DjangoJSONEncoder)
+    infoblox_dns_view_mapping = models.JSONField(default=dict, encoder=DjangoJSONEncoder)
+    cf_fields_ignore = models.JSONField(default=dict, encoder=DjangoJSONEncoder)
+    import_ipv4 = models.BooleanField(
+        default=True,
+        verbose_name="Import IPv4",
+    )
+    import_ipv6 = models.BooleanField(
+        default=False,
+        verbose_name="Import IPv6",
+    )
+    create_host_record = models.BooleanField(
+        default=True,
+        verbose_name="Create Host Record",
+        help_text="Infoblox - Create IP Address as Host Record",
+    )
+    create_a_record = models.BooleanField(
+        default=False,
+        verbose_name="Create A Record",
+        help_text="Infoblox - Create IP Address as A Record",
+    )
+    create_ptr_record = models.BooleanField(
+        default=False,
+        verbose_name="Create PTR Record",
+        help_text="Infoblox - Create PTR Record for IP Address",
+    )
+    job_enabled = models.BooleanField(
+        default=False,
+        verbose_name="Enabled for Sync Job",
+    )
+
+    class Meta:
+        """Meta class for SSOTInfobloxConfig."""
+
+        verbose_name = "SSOT Infoblox Config"
+        verbose_name_plural = "SSOT Infoblox Configs"
+
+    def __str__(self):
+        """String representation of singleton instance."""
+        return self.name
+
+    def _clean_infoblox_sync_filters(self):  # pylint: disable=too-many-branches
+        """Performs validation of the infoblox_sync_filters field."""
+        allowed_keys = {"network_view", "prefixes_ipv4", "prefixes_ipv6"}
+
+        if not isinstance(self.infoblox_sync_filters, list):
+            raise ValidationError({"infoblox_sync_filters": "Sync filters must be a list."})
+
+        if len(self.infoblox_sync_filters) == 0:
+            raise ValidationError(
+                {
+                    "infoblox_sync_filters": 'At least one filter must be defined. You can use the default one: [{"network_view": "default"}]'
+                }
+            )
+
+        network_views = set()
+        for sync_filter in self.infoblox_sync_filters:
+            if not isinstance(sync_filter, dict):
+                raise ValidationError({"infoblox_sync_filters": "Sync filter must be a dict."})
+            invalid_keys = set(sync_filter.keys()) - allowed_keys
+            if invalid_keys:
+                raise ValidationError(
+                    {"infoblox_sync_filters": f"Invalid keys found in the sync filter: {', '.join(invalid_keys)}."}
+                )
+
+            if "network_view" not in sync_filter:
+                raise ValidationError({"infoblox_sync_filters": "Sync filter must have `network_view` key defined."})
+            network_view = sync_filter["network_view"]
+            if not isinstance(network_view, str):
+                raise ValidationError({"infoblox_sync_filters": "Value of the `network_view` key must be a string."})
+
+            if network_view in network_views:
+                raise ValidationError(
+                    {
+                        "infoblox_sync_filters": f"Duplicate value for the `network_view` found: {sync_filter['network_view']}."
+ } + ) + network_views.add(network_view) + + if "prefixes_ipv4" in sync_filter: + if not isinstance(sync_filter["prefixes_ipv4"], list): + raise ValidationError({"infoblox_sync_filters": "Value of the `prefixes_ipv4` key must be a list."}) + if not sync_filter["prefixes_ipv4"]: + raise ValidationError( + {"infoblox_sync_filters": "Value of the `prefixes_ipv4` key must not be an empty list."} + ) + for prefix in sync_filter["prefixes_ipv4"]: + try: + if "/" not in prefix: + raise ValidationError( + {"infoblox_sync_filters": f"IPv4 prefix must have a prefix length: {prefix}."} + ) + ipaddress.IPv4Network(prefix) + except (ValueError, TypeError) as error: + raise ValidationError( # pylint: disable=raise-missing-from + {"infoblox_sync_filters": f"IPv4 prefix parsing error: {str(error)}."} + ) + + if "prefixes_ipv6" in sync_filter: + if not isinstance(sync_filter["prefixes_ipv6"], list): + raise ValidationError({"infoblox_sync_filters": "Value of the `prefixes_ipv6` key must be a list."}) + if not sync_filter["prefixes_ipv6"]: + raise ValidationError( + {"infoblox_sync_filters": "Value of the `prefixes_ipv6` key must not be an empty list."} + ) + for prefix in sync_filter["prefixes_ipv6"]: + try: + if "/" not in prefix: + raise ValidationError( + {"infoblox_sync_filters": f"IPv6 prefix must have a prefix length: {prefix}."} + ) + ipaddress.IPv6Network(prefix) + except (ValueError, TypeError) as error: + raise ValidationError( # pylint: disable=raise-missing-from + {"infoblox_sync_filters": f"IPv6 prefix parsing error: {str(error)}."} + ) + + def _clean_secrets_group(self): + """Performs validation of the secrets_group field.""" + if not self.infoblox_instance.secrets_group: + raise ValidationError({"secrets_group": "Infoblox instance must have Secrets groups assigned."}) + try: + self.infoblox_instance.secrets_group.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_REST, + secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME, + ) + except SecretsGroupAssociation.DoesNotExist: + raise ValidationError( # pylint: disable=raise-missing-from + { + "secrets_group": "Secrets group for the Infoblox instance must have secret with type Username and access type REST." + } + ) + try: + self.infoblox_instance.secrets_group.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_REST, + secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD, + ) + except SecretsGroupAssociation.DoesNotExist: + raise ValidationError( # pylint: disable=raise-missing-from + { + "secrets_group": "Secrets group for the Infoblox instance must have secret with type Password and access type REST." 
+ } + ) + + def _clean_import_ip(self): + """Performs validation of the import_ipv* fields.""" + if not (self.import_ipv4 or self.import_ipv6): + raise ValidationError( + { + "import_ipv4": "At least one of `import_ipv4` or `import_ipv6` must be set to True.", + "import_ipv6": "At least one of `import_ipv4` or `import_ipv6` must be set to True.", + } + ) + + def _clean_ip_address_create_options(self): + """Performs validation of the Infoblox IP Address creation options.""" + if self.create_a_record and self.create_host_record: + raise ValidationError( + { + "create_a_record": "Only one of `create_a_record` or `create_host_record` can be enabled at the same time.", + "create_host_record": "Only one of `create_a_record` or `create_host_record` can be enabled at the same time.", + }, + ) + if self.create_host_record and self.create_ptr_record: + raise ValidationError( + { + "create_host_record": "`create_ptr_record` can be used with `create_a_record` only.", + "create_ptr_record": "`create_ptr_record` can be used with `create_a_record` only.", + }, + ) + + if self.create_ptr_record and not self.create_a_record: + raise ValidationError( + {"create_ptr_record": "To use `create_ptr_record` you must enable `create_a_record`."}, + ) + + if not (self.create_a_record or self.create_host_record): + raise ValidationError( + { + "create_a_record": "Either `create_a_record` or `create_host_record` must be enabled.", + "create_host_record": "Either `create_a_record` or `create_host_record` must be enabled.", + }, + ) + + def _clean_infoblox_dns_view_mapping(self): + """Performs validation of the infoblox_dns_view_mapping field.""" + if not isinstance(self.infoblox_dns_view_mapping, dict): + raise ValidationError( + { + "infoblox_dns_view_mapping": "`infoblox_dns_view_mapping` must be a dictionary mapping network view names to dns view names.", + }, + ) + + def _clean_cf_fields_ignore(self): + """Performs validation of the cf_fields_ignore field.""" + if not isinstance(self.cf_fields_ignore, dict): + raise ValidationError( + { + "cf_fields_ignore": "`cf_fields_ignore` must be a dictionary.", + }, + ) + for key, value in self.cf_fields_ignore.items(): + if key not in ( + "extensible_attributes", + "custom_fields", + ): + raise ValidationError( + { + "cf_fields_ignore": f"Invalid key name `{key}`. 
Only `extensible_attributes` and `custom_fields` are allowed.",
+                    },
+                )
+            if not isinstance(value, list) or {type(el) for el in value} - {str}:
+                raise ValidationError(
+                    {
+                        "cf_fields_ignore": f"Value of key `{key}` must be a list of strings.",
+                    },
+                )
+
+    def clean(self):
+        """Clean method for SSOTInfobloxConfig."""
+        super().clean()
+
+        self._clean_infoblox_sync_filters()
+        self._clean_secrets_group()
+        self._clean_import_ip()
+        self._clean_ip_address_create_options()
+        self._clean_infoblox_dns_view_mapping()
+        self._clean_cf_fields_ignore()
diff --git a/nautobot_ssot/integrations/infoblox/tables.py b/nautobot_ssot/integrations/infoblox/tables.py
new file mode 100644
index 000000000..e6739e541
--- /dev/null
+++ b/nautobot_ssot/integrations/infoblox/tables.py
@@ -0,0 +1,39 @@
+"""Tables implementation for SSOT Infoblox."""
+import django_tables2 as tables
+
+from nautobot.apps.tables import BaseTable, BooleanColumn, ButtonsColumn
+
+from .models import SSOTInfobloxConfig
+
+
+class SSOTInfobloxConfigTable(BaseTable):
+    """Table for SSOTInfobloxConfig."""
+
+    name = tables.LinkColumn()
+    infoblox_url = tables.Column(accessor="infoblox_instance__remote_url")
+    enable_sync_to_infoblox = BooleanColumn(orderable=False)
+    import_subnets = BooleanColumn(orderable=False)
+    import_ip_addresses = BooleanColumn(orderable=False)
+    import_vlan_views = BooleanColumn(orderable=False)
+    import_vlans = BooleanColumn(orderable=False)
+    import_ipv4 = BooleanColumn(orderable=False)
+    import_ipv6 = BooleanColumn(orderable=False)
+    job_enabled = BooleanColumn(orderable=False)
+    actions = ButtonsColumn(SSOTInfobloxConfig, buttons=("changelog", "edit", "delete"))
+
+    class Meta(BaseTable.Meta):
+        """Meta attributes."""
+
+        model = SSOTInfobloxConfig
+        fields = (
+            "name",
+            "infoblox_url",
+            "enable_sync_to_infoblox",
+            "import_subnets",
+            "import_ip_addresses",
+            "import_vlan_views",
+            "import_vlans",
+            "import_ipv4",
+            "import_ipv6",
+            "job_enabled",
+        )
diff --git a/nautobot_ssot/integrations/infoblox/urls.py b/nautobot_ssot/integrations/infoblox/urls.py
new file mode 100644
index 000000000..53183c80b
--- /dev/null
+++ b/nautobot_ssot/integrations/infoblox/urls.py
@@ -0,0 +1,26 @@
+"""URL patterns for SSOT Infoblox."""
+from django.urls import path
+from nautobot.apps.urls import NautobotUIViewSetRouter
+
+from . import views
+from . import models
+
+router = NautobotUIViewSetRouter()
+router.register("config/infoblox", viewset=views.SSOTInfobloxConfigUIViewSet)
+
+urlpatterns = [
+    path(
+        "config/infoblox/<uuid:pk>/changelog/",
+        views.SSOTInfobloxConfigChangeLogView.as_view(),
+        name="ssotinfobloxconfig_changelog",
+        kwargs={"model": models.SSOTInfobloxConfig},
+    ),
+    path(
+        "config/infoblox/<uuid:pk>/notes/",
+        views.SSOTInfobloxConfigNotesView.as_view(),
+        name="ssotinfobloxconfig_notes",
+        kwargs={"model": models.SSOTInfobloxConfig},
+    ),
+]
+
+urlpatterns += router.urls
diff --git a/nautobot_ssot/integrations/infoblox/views.py b/nautobot_ssot/integrations/infoblox/views.py
new file mode 100644
index 000000000..46d2b691e
--- /dev/null
+++ b/nautobot_ssot/integrations/infoblox/views.py
@@ -0,0 +1,57 @@
+"""Views implementation for SSOT Infoblox."""
+from nautobot.extras.views import ObjectChangeLogView, ObjectNotesView
+from nautobot.apps.views import (
+    ObjectDestroyViewMixin,
+    ObjectDetailViewMixin,
+    ObjectEditViewMixin,
+    ObjectListViewMixin,
+)
+
+from .api.serializers import SSOTInfobloxConfigSerializer
+from .filters import SSOTInfobloxConfigFilterSet
+from .forms import SSOTInfobloxConfigFilterForm, SSOTInfobloxConfigForm
+from .models import SSOTInfobloxConfig
+from .tables import SSOTInfobloxConfigTable
+
+
+class SSOTInfobloxConfigUIViewSet(
+    ObjectDestroyViewMixin, ObjectDetailViewMixin, ObjectListViewMixin, ObjectEditViewMixin
+):  # pylint: disable=abstract-method
+    """SSOTInfobloxConfig UI ViewSet."""
+
+    queryset = SSOTInfobloxConfig.objects.all()
+    table_class = SSOTInfobloxConfigTable
+    filterset_class = SSOTInfobloxConfigFilterSet
+    filterset_form_class = SSOTInfobloxConfigFilterForm
+    form_class = SSOTInfobloxConfigForm
+    serializer_class = SSOTInfobloxConfigSerializer
+    lookup_field = "pk"
+    action_buttons = ("add",)
+
+    def get_template_name(self):
+        """Override inherited method to allow custom location for templates."""
+        action = self.action
+        app_label = "nautobot_ssot_infoblox"
+        model_opts = self.queryset.model._meta
+        if action in ["create", "update"]:
+            template_name = f"{app_label}/{model_opts.model_name}_update.html"
+        elif action == "retrieve":
+            template_name = f"{app_label}/{model_opts.model_name}_retrieve.html"
+        elif action == "list":
+            template_name = f"{app_label}/{model_opts.model_name}_list.html"
+        else:
+            template_name = super().get_template_name()
+
+        return template_name
+
+
+class SSOTInfobloxConfigChangeLogView(ObjectChangeLogView):
+    """SSOTInfobloxConfig ChangeLog View."""
+
+    base_template = "nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html"
+
+
+class SSOTInfobloxConfigNotesView(ObjectNotesView):
+    """SSOTInfobloxConfig Notes View."""
+
+    base_template = "nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html"

From 8859d5d21397a042b16634f763f8e8e8e825f87f Mon Sep 17 00:00:00 2001
From: Przemek Rogala
Date: Fri, 17 May 2024 21:57:39 +0100
Subject: [PATCH 049/229] Update Namespace methods.
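
Prefix creation now resolves the Nautobot namespace through
`map_network_view_to_namespace(value=..., direction="nv_to_ns")`. A sketch of
the assumed behavior, following the "default" view to "Global" namespace
remapping referenced in the comment this patch replaces:

    map_network_view_to_namespace(value="default", direction="nv_to_ns")  # -> "Global" (per the old comment)
    map_network_view_to_namespace(value="dev", direction="nv_to_ns")      # -> "dev" (assumed passthrough)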
--- .../infoblox/diffsync/models/nautobot.py | 37 +++++++++++++++----
 1 file changed, 29 insertions(+), 8 deletions(-)

diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py b/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py
index d0e5454c0..a13671bb1 100644
--- a/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py
+++ b/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py
@@ -12,7 +12,6 @@
 from nautobot.ipam.models import VLAN as OrmVlan
 from nautobot.ipam.models import VLANGroup as OrmVlanGroup
 from nautobot.ipam.models import Namespace as OrmNamespace
-from nautobot_ssot.integrations.infoblox.constant import PLUGIN_CFG
 from nautobot_ssot.integrations.infoblox.diffsync.models.base import Namespace, Network, IPAddress, Vlan, VlanView
 from nautobot_ssot.integrations.infoblox.utils.diffsync import (
     create_tag_sync_from_infoblox,
@@ -105,8 +104,7 @@ class NautobotNetwork(Network):
     @classmethod
     def create(cls, diffsync, ids, attrs):
         """Create Prefix object in Nautobot."""
-        # Remap "default" Network View to "Global" Namespace
-        namespace_name = map_network_view_to_namespace(ids["namespace"])
+        namespace_name = map_network_view_to_namespace(value=ids["namespace"], direction="nv_to_ns")
         _prefix = OrmPrefix(
             prefix=ids["network"],
             status_id=diffsync.status_map["Active"],
@@ -217,7 +215,7 @@ def create(cls, diffsync, ids, attrs):
         try:
             status = diffsync.status_map[attrs["status"]]
         except KeyError:
-            status = diffsync.status_map[PLUGIN_CFG.get("default_status", "Active")]
+            status = diffsync.config.default_status.pk
         addr = f"{ids['address']}/{ids['prefix_length']}"
         if attrs.get("ip_addr_type"):
             if attrs["ip_addr_type"].lower() in IPAddressTypeChoices.as_dict():
@@ -256,7 +254,7 @@ def update(self, attrs):
         try:
             status = self.diffsync.status_map[attrs["status"]]
         except KeyError:
-            status = self.diffsync.status_map[PLUGIN_CFG.get("default_status", "Active")]
+            status = self.diffsync.config.default_status.pk
         _ipaddr.status_id = status
         if attrs.get("ip_addr_type"):
             if attrs["ip_addr_type"].lower() in IPAddressTypeChoices.as_dict():
@@ -389,6 +387,29 @@ def create(cls, diffsync, ids, attrs):
         )
         if attrs.get("ext_attrs"):
             process_ext_attrs(diffsync=diffsync, obj=_ns, extattrs=attrs["ext_attrs"])
-        _ns.validated_save()
-        diffsync.namespace_map[ids["name"]] = _ns.id
-        return super().create(ids=ids, diffsync=diffsync, attrs=attrs)
+        try:
+            _ns.validated_save()
+            diffsync.namespace_map[ids["name"]] = _ns.id
+            return super().create(ids=ids, diffsync=diffsync, attrs=attrs)
+        except ValidationError as err:
+            diffsync.job.logger.warning(f"Unable to create Namespace {_ns.name}. {err}")
+            return None
+
+    def update(self, attrs):
+        """Update Namespace object in Nautobot."""
+        _ns = OrmNamespace.objects.get(id=self.pk)
+        if "ext_attrs" in attrs:
+            process_ext_attrs(diffsync=self.diffsync, obj=_ns, extattrs=attrs["ext_attrs"])
+        try:
+            _ns.validated_save()
+            return super().update(attrs)
+        except ValidationError as err:
+            self.diffsync.job.logger.warning(f"Unable to update Namespace {_ns.name}. {err}")
+            return None
+
+    def delete(self):
+        """Don't allow deleting Namespaces in Nautobot."""
+        self.diffsync.job.logger.error(
+            f"Deleting Namespaces in Nautobot is not allowed. Infoblox Network View: {self.get_identifiers()['name']}"
+        )
+        raise NotImplementedError

From b9beef7a5d2645f971f82e81fdb0e95f104bd325 Mon Sep 17 00:00:00 2001
From: Przemek Rogala
Date: Fri, 17 May 2024 21:57:56 +0100
Subject: [PATCH 050/229] Add SSOT Config migrations.
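
After applying the migration below, the model is available for use; the
nautobot_database_ready callback added earlier then seeds a config from any
legacy settings. A quick sketch of verifying the result (object name as
created by that callback):

    from nautobot_ssot.models import SSOTInfobloxConfig
    SSOTInfobloxConfig.objects.filter(name="InfobloxConfigDefault").exists()  # True after first startup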
--- .../0009_ssotconfig_ssotinfobloxconfig.py | 90 +++++++++++++++++++ 1 file changed, 90 insertions(+) create mode 100644 nautobot_ssot/migrations/0009_ssotconfig_ssotinfobloxconfig.py diff --git a/nautobot_ssot/migrations/0009_ssotconfig_ssotinfobloxconfig.py b/nautobot_ssot/migrations/0009_ssotconfig_ssotinfobloxconfig.py new file mode 100644 index 000000000..e100e1845 --- /dev/null +++ b/nautobot_ssot/migrations/0009_ssotconfig_ssotinfobloxconfig.py @@ -0,0 +1,90 @@ +# Generated by Django 3.2.23 on 2024-05-17 18:53 + +import django.core.serializers.json +from django.db import migrations, models +import django.db.models.deletion +import nautobot.core.models.fields +import nautobot.extras.models.mixins +import nautobot_ssot.integrations.infoblox.models +import uuid + + +class Migration(migrations.Migration): + dependencies = [ + ("extras", "0102_set_null_objectchange_contenttype"), + ("nautobot_ssot", "0008_auto_20240110_1019"), + ] + + operations = [ + migrations.CreateModel( + name="SSOTConfig", + fields=[ + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False)), + ], + options={ + "managed": False, + "default_permissions": ("view",), + }, + ), + migrations.CreateModel( + name="SSOTInfobloxConfig", + fields=[ + ( + "id", + models.UUIDField( + default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True + ), + ), + ("created", models.DateTimeField(auto_now_add=True, null=True)), + ("last_updated", models.DateTimeField(auto_now=True, null=True)), + ( + "_custom_field_data", + models.JSONField(blank=True, default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder), + ), + ("name", models.CharField(max_length=255, unique=True)), + ("description", models.CharField(blank=True, max_length=255)), + ("infoblox_wapi_version", models.CharField(default="v2.12", max_length=255)), + ("enable_sync_to_infoblox", models.BooleanField(default=False)), + ("import_ip_addresses", models.BooleanField(default=False)), + ("import_subnets", models.BooleanField(default=False)), + ("import_vlan_views", models.BooleanField(default=False)), + ("import_vlans", models.BooleanField(default=False)), + ( + "infoblox_sync_filters", + models.JSONField( + default=nautobot_ssot.integrations.infoblox.models._get_default_sync_filters, + encoder=django.core.serializers.json.DjangoJSONEncoder, + ), + ), + ( + "infoblox_dns_view_mapping", + models.JSONField(default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder), + ), + ( + "cf_fields_ignore", + models.JSONField(default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder), + ), + ("import_ipv4", models.BooleanField(default=True)), + ("import_ipv6", models.BooleanField(default=False)), + ("create_host_record", models.BooleanField(default=True)), + ("create_a_record", models.BooleanField(default=False)), + ("create_ptr_record", models.BooleanField(default=False)), + ("job_enabled", models.BooleanField(default=False)), + ("default_status", models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to="extras.status")), + ( + "infoblox_instance", + models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to="extras.externalintegration"), + ), + ("tags", nautobot.core.models.fields.TagsField(through="extras.TaggedItem", to="extras.Tag")), + ], + options={ + "verbose_name": "SSOT Infoblox Config", + "verbose_name_plural": "SSOT Infoblox Configs", + }, + bases=( + models.Model, + nautobot.extras.models.mixins.DynamicGroupMixin, + nautobot.extras.models.mixins.NotesMixin, + ), + ), + ] From 
24bcfa8ddd3d9e727ab407d624e5d1ccf1214a85 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Fri, 17 May 2024 21:58:31 +0100 Subject: [PATCH 051/229] Add SSOTInfobloxConfig templates. --- .../nautobot_ssot_infoblox_config.html | 12 ++ .../ssotinfobloxconfig_changelog.html | 105 ++++++++++++++ .../ssotinfobloxconfig_list.html | 12 ++ .../ssotinfobloxconfig_retrieve.html | 128 ++++++++++++++++++ .../ssotinfobloxconfig_update.html | 50 +++++++ 5 files changed, 307 insertions(+) create mode 100644 nautobot_ssot/templates/nautobot_ssot_infoblox/nautobot_ssot_infoblox_config.html create mode 100644 nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_changelog.html create mode 100644 nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_list.html create mode 100644 nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html create mode 100644 nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_update.html diff --git a/nautobot_ssot/templates/nautobot_ssot_infoblox/nautobot_ssot_infoblox_config.html b/nautobot_ssot/templates/nautobot_ssot_infoblox/nautobot_ssot_infoblox_config.html new file mode 100644 index 000000000..c47ffc6fb --- /dev/null +++ b/nautobot_ssot/templates/nautobot_ssot_infoblox/nautobot_ssot_infoblox_config.html @@ -0,0 +1,12 @@ +{% extends 'nautobot_ssot/config.html' %} +{% load helpers %} + +{% block title %}{{ block.super }} - Infoblox Configs{% endblock %} + +{% block content %} +
+
+ {% include 'utilities/obj_table.html' with table=infobloxconfig_table table_template='panel_table.html' heading='Infoblox Configs' %} +
+
+{% endblock content %} \ No newline at end of file diff --git a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_changelog.html b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_changelog.html new file mode 100644 index 000000000..b51955b0b --- /dev/null +++ b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_changelog.html @@ -0,0 +1,105 @@ +{% extends 'generic/object_retrieve.html' %} +{% load helpers %} +{% load buttons %} + +{% block breadcrumbs %} +
  • SSOT Configs
  • +
  • SSOT Infoblox Configs
  • +
  • {{ object|hyperlinked_object }}
  • +{% endblock breadcrumbs %} + +{% block extra_buttons %} + +{% endblock extra_buttons %} + +{% block masthead %} +

    + {% block title %}{{ object }}{% endblock title %} +

    +{% endblock masthead %} + +{% block content_left_page %} +
    +
    + Infoblox Config +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ <tr><td>Name</td><td>{{ object.name }}</td></tr>
+ <tr><td>Description</td><td>{{ object.description|placeholder }}</td></tr>
+ <tr><td>Infoblox Instance</td><td>{{ object.infoblox_instance|hyperlinked_object }}</td></tr>
+ <tr><td>Default Status for Imported Objects</td><td>{{ object.default_status|hyperlinked_object }}</td></tr>
+ <tr><td>Infoblox WAPI Version</td><td>{{ object.infoblox_wapi_version|placeholder }}</td></tr>
+ <tr><td>Enable Sync from Nautobot to Infoblox</td><td>{{ object.enable_sync_to_infoblox }}</td></tr>
+ <tr><td>Import IP Addresses from Infoblox</td><td>{{ object.import_ip_addresses }}</td></tr>
+ <tr><td>Import VLANs from Infoblox</td><td>{{ object.import_vlans }}</td></tr>
+ <tr><td>Import VLAN Views from Infoblox</td><td>{{ object.import_vlan_views }}</td></tr>
+ <tr><td>Import IPv4 from Infoblox</td><td>{{ object.import_ipv4 }}</td></tr>
+ <tr><td>Import IPv6 from Infoblox</td><td>{{ object.import_ipv6 }}</td></tr>
+ <tr><td>Can be used in Sync Job</td><td>{{ object.job_enabled }}</td></tr>
    +
    +{% endblock %} + +{% block content_right_page %} +
    +
    + Infoblox Sync Filters +
    + + + + +
    + {% include 'extras/inc/json_data.html' with data=object.infoblox_sync_filters format="json" %} +
    +
    +
    +
+ Infoblox Network View to DNS View Mapping +
    + + + + +
    + {% include 'extras/inc/json_data.html' with data=object.infoblox_dns_view_mapping format="json" %} +
    +
    + +{% endblock %} \ No newline at end of file diff --git a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_list.html b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_list.html new file mode 100644 index 000000000..b3349e3e8 --- /dev/null +++ b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_list.html @@ -0,0 +1,12 @@ +{% extends 'generic/object_list.html' %} +{% load helpers %} +{% load buttons %} + +{% block breadcrumbs %} +
  • SSOT Configs
  • +
  • SSOT Infoblox Configs
  • +{% endblock breadcrumbs %} + +{% block buttons %} + +{% endblock buttons %} diff --git a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html new file mode 100644 index 000000000..3e19007e0 --- /dev/null +++ b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html @@ -0,0 +1,128 @@ +{% extends 'generic/object_retrieve.html' %} +{% load helpers %} +{% load buttons %} + +{% block breadcrumbs %} +
  • SSOT Configs
  • +
  • SSOT Infoblox Configs
  • +
  • {{ object|hyperlinked_object }}
  • +{% endblock breadcrumbs %} + +{% block extra_buttons %} + +{% endblock extra_buttons %} + +{% block masthead %} +

    + {% block title %}{{ object }}{% endblock title %} +

    +{% endblock masthead %} + +{% block content_left_page %} +
    +
    + Infoblox Config +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ <tr><td>Name</td><td>{{ object.name }}</td></tr>
+ <tr><td>Description</td><td>{{ object.description|placeholder }}</td></tr>
+ <tr><td>Infoblox Instance</td><td>{{ object.infoblox_instance|hyperlinked_object }}</td></tr>
+ <tr><td>Default Status for Imported Objects</td><td>{{ object.default_status|hyperlinked_object }}</td></tr>
+ <tr><td>Infoblox WAPI Version</td><td>{{ object.infoblox_wapi_version|placeholder }}</td></tr>
+ <tr><td>Enable Sync from Nautobot to Infoblox</td><td>{{ object.enable_sync_to_infoblox }}</td></tr>
+ <tr><td>Import IP Addresses from Infoblox</td><td>{{ object.import_ip_addresses }}</td></tr>
+ <tr><td>Import VLANs from Infoblox</td><td>{{ object.import_vlans }}</td></tr>
+ <tr><td>Import VLAN Views from Infoblox</td><td>{{ object.import_vlan_views }}</td></tr>
+ <tr><td>Import IPv4 from Infoblox</td><td>{{ object.import_ipv4 }}</td></tr>
+ <tr><td>Import IPv6 from Infoblox</td><td>{{ object.import_ipv6 }}</td></tr>
+ <tr><td>Infoblox - Create IP Address as Host Record</td><td>{{ object.create_host_record }}</td></tr>
+ <tr><td>Infoblox - Create IP Address as A Record</td><td>{{ object.create_a_record }}</td></tr>
+ <tr><td>Infoblox - Create PTR Record for IP Address</td><td>{{ object.create_ptr_record }}</td></tr>
+ <tr><td>Can be used in Sync Job</td><td>{{ object.job_enabled }}</td></tr>
    +
    +{% endblock %} + +{% block content_right_page %} +
    +
    + Infoblox Sync Filters +
    + + + + +
    + {% include 'extras/inc/json_data.html' with data=object.infoblox_sync_filters format="json" %} +
    +
    +
    +
    + Infoblox Network View to DNS View Mapping +
    + + + + +
    + {% include 'extras/inc/json_data.html' with data=object.infoblox_dns_view_mapping format="json" %} +
    +
    +
    +
    + Extensible Attributes/Custom Fields to Ignore +
    + + + + +
    + {% include 'extras/inc/json_data.html' with data=object.cf_fields_ignore format="json" %} +
    +
    +{% endblock %} \ No newline at end of file diff --git a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_update.html b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_update.html new file mode 100644 index 000000000..225171dee --- /dev/null +++ b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_update.html @@ -0,0 +1,50 @@ +{% extends 'generic/object_create.html' %} +{% load form_helpers %} + +{% block form %} +
    +
Infoblox Config
    +
    + {% render_field form.name %} + {% render_field form.description %} + {% render_field form.infoblox_instance %} + {% render_field form.infoblox_wapi_version %} + {% render_field form.enable_sync_to_infoblox %} + {% render_field form.import_ip_addresses %} + {% render_field form.import_subnets %} + {% render_field form.import_vlan_views %} + {% render_field form.import_vlans %} + {% render_field form.import_ipv4 %} + {% render_field form.import_ipv6 %} + {% render_field form.create_host_record %} + {% render_field form.create_a_record %} + {% render_field form.create_ptr_record %} + {% render_field form.default_status %} + {% render_field form.job_enabled %} +
    +
    +
    +
Infoblox Sync Filters
    +
    + {% render_field form.infoblox_sync_filters %} +
    +
    +
    +
Infoblox Network View to DNS View Mapping
    +
    + {% render_field form.infoblox_dns_view_mapping %} +
    +
    +
    +
Extensible Attributes/Custom Fields to Ignore
    +
    + {% render_field form.cf_fields_ignore %} +
    +
    +
    +
    Notes
    +
    + {% render_field form.object_note %} +
    +
    +{% endblock %} \ No newline at end of file From d0b0a031950ba2be5c19653505240de60e77164f Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 20 May 2024 14:20:17 +0100 Subject: [PATCH 052/229] - Allow excluding attributes from sync. - Add mapping between network view and dns view. - Use debug value from the job to set logging level. - Add get_a_record_by_ref. - Add get_ptr_record_by_ref. - Add get_ptr_record_by_ip. - Add get_authoritative_zones_for_dns_view. - Add update_host_record. - Add update_ptr_record. - Add update_a_record. - Add get_dns_view_for_network_view. - Delete no longer used remove_duplicates. - Standardize JSONDecoderError handling. --- .../integrations/infoblox/utils/client.py | 782 ++++++++++++++---- 1 file changed, 620 insertions(+), 162 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/utils/client.py b/nautobot_ssot/integrations/infoblox/utils/client.py index 6886921bc..f66e0c18b 100644 --- a/nautobot_ssot/integrations/infoblox/utils/client.py +++ b/nautobot_ssot/integrations/infoblox/utils/client.py @@ -2,21 +2,20 @@ from __future__ import annotations -import json import ipaddress +import json import logging import re -from typing import Optional import urllib.parse from collections import defaultdict from typing import Optional + import requests +from dns import reversename from requests.auth import HTTPBasicAuth -from requests.exceptions import HTTPError from requests.compat import urljoin -from dns import reversename -from nautobot.core.settings_funcs import is_truthy -from nautobot_ssot.integrations.infoblox.constant import PLUGIN_CFG +from requests.exceptions import HTTPError + from nautobot_ssot.integrations.infoblox.utils.diffsync import get_ext_attr_dict logger = logging.getLogger("nautobot.ssot.infoblox") @@ -36,19 +35,24 @@ def parse_url(address): return urllib.parse.urlparse(address) -def get_default_ext_attrs(review_list: list) -> dict: +def get_default_ext_attrs(review_list: list, excluded_attrs: Optional[list] = None) -> dict: """Determine the default Extensibility Attributes for an object being processed. Args: review_list (list): The list of objects that need to be reviewed to gather default Extensibility Attributes. + excluded_attrs (list): List of Extensibility Attributes to exclude. Returns: dict: Dictionary of default Extensibility Attributes for a VLAN View, VLANs, Prefixes, or IP Addresses. 
""" + if excluded_attrs is None: + excluded_attrs = [] default_ext_attrs = {} for item in review_list: - pf_ext_attrs = get_ext_attr_dict(extattrs=item.get("extattrs", {})) + pf_ext_attrs = get_ext_attr_dict(extattrs=item.get("extattrs", {}), excluded_attrs=excluded_attrs) for attr in pf_ext_attrs: + if attr in excluded_attrs: + continue if attr not in default_ext_attrs: default_ext_attrs[attr] = None return default_ext_attrs @@ -99,11 +103,14 @@ class InfobloxApi: # pylint: disable=too-many-public-methods, too-many-instanc def __init__( self, - url=PLUGIN_CFG.get("NAUTOBOT_INFOBLOX_URL"), - username=PLUGIN_CFG.get("NAUTOBOT_INFOBLOX_USERNAME"), - password=PLUGIN_CFG.get("NAUTOBOT_INFOBLOX_PASSWORD"), - verify_ssl=is_truthy(PLUGIN_CFG.get("NAUTOBOT_INFOBLOX_VERIFY_SSL")), - wapi_version=PLUGIN_CFG.get("NAUTOBOT_INFOBLOX_WAPI_VERSION"), + url, + username, + password, + verify_ssl, + wapi_version, + timeout, + debug=False, + network_view_to_dns_map=None, cookie=None, ): # pylint: disable=too-many-arguments """Initialize Infoblox class.""" @@ -117,7 +124,17 @@ def __init__( self.url = parsed_url.geturl() self.auth = HTTPBasicAuth(username, password) self.wapi_version = wapi_version + self.timeout = timeout self.session = self._init_session(verify_ssl=verify_ssl, cookie=cookie) + # Used to select correct DNS View when creating DNS records + self.network_view_to_dns_map = {} + if network_view_to_dns_map and isinstance(network_view_to_dns_map, dict): + self.network_view_to_dns_map.update(network_view_to_dns_map) + # Change logging level to Debug if Debug checkbox is ticked in the Job form + logging_level = logging.DEBUG if debug else logging.INFO + logger.setLevel(logging_level) + for handler in logger.handlers: + handler.setLevel(logging_level) def _init_session(self, verify_ssl: bool, cookie: Optional[dict]) -> requests.Session: """Initialize requests Session object that is used across all the API calls. 
@@ -161,7 +178,7 @@ def _request(self, method, path, **kwargs): else: self.session.auth = self.auth - resp = self.session.request(method, url, timeout=PLUGIN_CFG["infoblox_request_timeout"], **kwargs) + resp = self.session.request(method, url, timeout=self.timeout, **kwargs) # Infoblox provides meaningful error messages for error codes >= 400 if resp.status_code >= 400: try: @@ -189,7 +206,7 @@ def _delete(self, resource): logger.debug(response.json()) return response.json() except json.decoder.JSONDecodeError: - logger.info(response.text) + logger.error(response.text) return response.text def _update(self, resource, **params): @@ -210,7 +227,7 @@ def _update(self, resource, **params): logger.debug(response.json()) return response.json() except json.decoder.JSONDecodeError: - logger.info(response.text) + logger.error(response.text) return response.text def _get_network_ref( @@ -237,10 +254,10 @@ def _get_network_ref( logger.debug(response.json()) results = response.json().get("result") except json.decoder.JSONDecodeError: - logger.info(response.text) + logger.error(response.text) return response.text - if results and len(results): - return results[0] + if results: + return results[0].get("_ref") return None def _get_network_container_ref( @@ -267,7 +284,7 @@ def _get_network_container_ref( logger.debug(response.json()) results = response.json().get("result") except json.decoder.JSONDecodeError: - logger.info(response.text) + logger.error(response.text) return response.text if results and len(results): return results[0] @@ -348,7 +365,7 @@ def get_ipaddrs(url_path: str, data: dict) -> list: try: response = self._request(method="POST", path=url_path, json=data) except HTTPError as err: - logger.info(err.response.text) + logger.error(err.response.text) if response: # This should flatten the results, not return the first entry results = [] @@ -412,6 +429,7 @@ def create_network(self, prefix, comment=None, network_view: Optional[str] = Non Args: prefix (str): IP network to create. + network_view (str): Name of the network view, e.g. 'dev' Returns: (str) of reference network @@ -432,6 +450,7 @@ def delete_network(self, prefix, network_view: Optional[str] = None): Args: prefix (str): IPv4 prefix to delete. + network_view (str): Name of the network view, e.g. 'dev' Returns: (dict) deleted prefix. @@ -456,6 +475,7 @@ def update_network(self, prefix, comment=None, network_view: Optional[str] = Non Args: (str): IPv4 prefix to update. comment (str): IPv4 prefix update comment. + network_view (str): Name of the network view, e.g. 'dev' Returns: (dict) updated prefix. @@ -479,6 +499,7 @@ def create_network_container(self, prefix, comment=None, network_view: Optional[ Args: prefix (str): IP network to create. + network_view (str): Name of the network view, e.g. 'dev' Returns: (str) of reference network @@ -499,6 +520,7 @@ def delete_network_container(self, prefix, network_view: Optional[str] = None): Args: prefix (str): IPv4 prefix to delete. + network_view (str): Name of the network view, e.g. 'dev' Returns: (dict) deleted prefix. @@ -524,6 +546,7 @@ def update_network_container(self, prefix, comment=None, network_view: Optional[ Args: (str): IPv4 prefix to update. comment (str): IPv4 prefix update comment. + network_view (str): Name of the network view, e.g. 'dev' Returns: (dict) updated prefix. @@ -550,6 +573,7 @@ def create_range(self, prefix: str, start: str, end: str, network_view: Optional prefix: IP network range belongs to. start: The starting IP of the range. end: The ending IP of the range. 
+ network_view (str): Name of the network view, e.g. 'dev' Returns: str: Object reference of range. @@ -570,6 +594,7 @@ def get_host_record_by_name(self, fqdn, network_view: Optional[str] = None): Args: fqdn (str): IPv4 Address to look up + network_view (str): Name of the network view, e.g. 'dev' Returns: (list) of record dicts @@ -597,14 +622,20 @@ def get_host_record_by_name(self, fqdn, network_view: Optional[str] = None): if network_view: params["network_view"] = network_view response = self._request("GET", url_path, params=params) - logger.debug(response.json()) - return response.json().get("result") + try: + logger.debug(response.json()) + results = response.json().get("result") + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text def get_host_record_by_ip(self, ip_address, network_view: Optional[str] = None): """Get the host record by using IP Address. Args: ip_address (str): IPv4 Address to look up + network_view (str): Name of the network view, e.g. 'dev' Returns: (list) of record dicts @@ -632,14 +663,20 @@ def get_host_record_by_ip(self, ip_address, network_view: Optional[str] = None): if network_view: params["network_view"] = network_view response = self._request("GET", url_path, params=params) - logger.debug(response.json()) - return response.json().get("result") + try: + logger.debug(response.json()) + results = response.json().get("result") + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text def get_a_record_by_name(self, fqdn, network_view: Optional[str] = None): """Get the A record for a FQDN. Args: fqdn (str): "testdevice1.test" + network_view (str): Name of the network view, e.g. 'dev' Returns: (list) of record dicts @@ -656,20 +693,24 @@ def get_a_record_by_name(self, fqdn, network_view: Optional[str] = None): """ url_path = "record:a" params = {"name": fqdn, "_return_as_object": 1} - # TODO: This is a bit more complicated. One network view can have multiple DNS views - # default name for a DNS view for a network view is formed by prepending "default." to the network view name if network_view: - dns_view = self.get_default_dns_view_for_network_view(network_view) + dns_view = self.get_dns_view_for_network_view(network_view) params["view"] = dns_view response = self._request("GET", url_path, params=params) - logger.debug(response.json()) - return response.json().get("result") + try: + logger.debug(response.json()) + results = response.json().get("result") + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text def get_a_record_by_ip(self, ip_address, network_view: Optional[str] = None): """Get the A record for a IP Address. Args: ip_address (str): "10.220.0.101" + network_view (str): Name of the network view, e.g. 'dev' Returns: (list) of record dicts @@ -685,22 +726,144 @@ def get_a_record_by_ip(self, ip_address, network_view: Optional[str] = None): ] """ url_path = "record:a" - params = {"ipv4addr": ip_address, "_return_as_object": 1} - # TODO: This is a bit more complicated. One network view can have multiple DNS views - # default name for a DNS view for a network view is formed by prepending "default." 
to the network view name - # TODO: Would be interesting to see if we can specify network view in the lookup + params = { + "ipv4addr": ip_address, + "_return_as_object": 1, + "_return_fields": "name,view,ipv4addr,comment", + } if network_view: - dns_view = self.get_default_dns_view_for_network_view(network_view) + dns_view = self.get_dns_view_for_network_view(network_view) params["view"] = dns_view response = self._request("GET", url_path, params=params) - logger.debug(response.json()) - return response.json().get("result") + try: + logger.debug(response.json()) + results = response.json().get("result") + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text + if results: + return results[0] + return None + + def get_a_record_by_ref(self, ref: str): + """Get the A record by ref. + + Args: + ref (str): reference to the A record + + Returns: + (dict) A record + + Return Response: + [ + { + "_ref": "record:a/ZG5zLmJpbmRfYSQuX2RlZmF1bHQudGVzdCx0ZXN0ZGV2aWNlMSwxMC4yMjAuMC4xMDE:testdevice1.test/default", + "ipv4addr": "10.220.0.101", + "name": "testdevice1.test", + "view": "default" + } + ] + """ + url_path = f"{ref}" + params = { + "_return_fields": "name,view,ipv4addr,comment", + } + response = self._request("GET", path=url_path, params=params) + logger.error(response.text) + try: + logger.debug(response.json()) + return response.json() + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text + + def get_ptr_record_by_ref(self, ref: str): + """Get the PTR record by FQDN. + + Args: + ref (str): Reference to PTR record + + Returns: + (dict) PTR Record + + Return Response: + [ + { + "_ref": "record:ptr/ZG5zLmJpbmRfcHRyJC5fZGVmYXVsdC50ZXN0LjEwMS4wLjIyMC4xMC50ZXN0ZGV2aWNlMS50ZXN0:10.220.0.101.test/default", + "ptrdname": "testdevice1.test", + "view": "default" + } + ] + """ + url_path = f"{ref}" + params = { + "_return_fields": "name,ptrdname,ipv4addr,ipv6addr,view,comment", + } + response = self._request("GET", path=url_path, params=params) + logger.error(response.text) + try: + logger.debug(response.json()) + return response.json() + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text + + def get_ptr_record_by_ip( + self, ip_address, network_view: Optional[str] = None + ): # pylint: disable=inconsistent-return-statements + """Get the PTR record by FQDN. + + Args: + ip_address (str): "record:ptr/ZG5zLmJpbmRfcHRyJC5fZGVmYXVsdC50ZXN0LjEwMS4wLjIyMC4xMC50ZXN0ZGV2aWNlMS50ZXN0:10.220.0.101.test/default" + network_view (str): Name of the network view, e.g. 
'dev' + + Returns: + (dict) PTR Record + + Return Response: + { + "result": [ + { + "_ref": "record:ptr/ZG5zLmJpbmRfcHRyJC4yLmFycGEuaW4tYWRkci4xMC4wLjAuMS5ob3N0MS5uYXV0b2JvdC5sb2NhbC50ZXN0:1.0.0.10.in-addr.arpa/default.dev", + "extattrs": { + + }, + "ipv4addr": "10.0.0.1", + "ipv6addr": "", + "name": "1.0.0.10.in-addr.arpa", + "ptrdname": "host1.nautobot.local.test", + "view": "default.dev", + "zone": "in-addr.arpa" + } + ] + } + """ + url_path = "record:ptr" + params = { + "ipv4addr": ip_address, + "_return_as_object": 1, + "_return_fields": "ipv4addr,ipv6addr,name,view,extattrs,comment,zone,ptrdname", + } + if network_view: + dns_view = self.get_dns_view_for_network_view(network_view) + params["view"] = dns_view + response = self._request("GET", url_path, params=params) + try: + logger.debug(response.json()) + results = response.json().get("result") + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text + if results: + return results[0] + return None def get_ptr_record_by_name(self, fqdn, network_view: Optional[str] = None): """Get the PTR record by FQDN. Args: fqdn (str): "testdevice1.test" + network_view (str): Name of the network view, e.g. 'dev' Returns: (list) of record dicts @@ -717,11 +880,16 @@ def get_ptr_record_by_name(self, fqdn, network_view: Optional[str] = None): url_path = "record:ptr" params = {"ptrdname": fqdn, "_return_as_object": 1} if network_view: - dns_view = self.get_default_dns_view_for_network_view(network_view) + dns_view = self.get_dns_view_for_network_view(network_view) params["view"] = dns_view response = self._request("GET", url_path, params=params) - logger.debug(response.json()) - return response.json().get("result") + try: + logger.debug(response.json()) + results = response.json().get("result") + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text def get_all_dns_views(self): """Get all dns views. @@ -746,8 +914,13 @@ def get_all_dns_views(self): url_path = "view" params = {"_return_fields": "is_default,name,network_view", "_return_as_object": 1} response = self._request("GET", url_path, params=params) - logger.debug(response.json()) - return response.json().get("result") + try: + logger.debug(response.json()) + results = response.json().get("result") + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text def create_a_record(self, fqdn, ip_address, network_view: Optional[str] = None): """Create an A record for a given FQDN. @@ -755,6 +928,9 @@ def create_a_record(self, fqdn, ip_address, network_view: Optional[str] = None): Please note: This API call with work only for host records that do not have an associated a record. If an a record already exists, this will return a 400 error. + Args: + network_view (str): Name of the network view, e.g. 
'dev' + Returns: Dict: Dictionary of _ref and name @@ -768,11 +944,16 @@ def create_a_record(self, fqdn, ip_address, network_view: Optional[str] = None): params = {"_return_fields": "name", "_return_as_object": 1} payload = {"name": fqdn, "ipv4addr": ip_address} if network_view: - dns_view = self.get_default_dns_view_for_network_view(network_view) + dns_view = self.get_dns_view_for_network_view(network_view) payload["view"] = dns_view response = self._request("POST", url_path, params=params, json=payload) - logger.debug(response.json()) - return response.json().get("result") + try: + logger.debug(response.json()) + results = response.json().get("result") + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text def get_dhcp_lease(self, lease_to_check): """Get a DHCP lease for the IP/hostname passed in. @@ -801,6 +982,7 @@ def get_dhcp_lease_from_ipv4(self, ip_address, network_view: Optional[str] = Non Args: ip_address (str): "192.168.0.1" + network_view (str): Name of the network view, e.g. 'dev' Returns: (list) of record dicts @@ -824,14 +1006,20 @@ def get_dhcp_lease_from_ipv4(self, ip_address, network_view: Optional[str] = Non if network_view: params["network_view"] = network_view response = self._request("GET", url_path, params=params) - logger.debug(response.json()) - return response.json() + try: + logger.debug(response.json()) + results = response.json() + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text def get_dhcp_lease_from_hostname(self, hostname, network_view: Optional[str] = None): """Get a DHCP lease for the hostname passed in. Args: hostnames (str): "testdevice1.test" + network_view (str): Name of the network view, e.g. 'dev' Returns: (list) of record dicts @@ -855,8 +1043,13 @@ def get_dhcp_lease_from_hostname(self, hostname, network_view: Optional[str] = N if network_view: params["network_view"] = network_view response = self._request("GET", url_path, params=params) - logger.debug(response.json()) - return response.json() + try: + logger.debug(response.json()) + results = response.json() + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text def get_all_ranges( self, prefix: Optional[str] = None, network_view: Optional[str] = None @@ -865,6 +1058,7 @@ def get_all_ranges( Args: prefix: Network prefix - '10.220.0.0/22' + network_view (str): Name of the network view, e.g. 'dev' Returns: dict: The mapping of network_view to prefix to defined ranges. @@ -889,10 +1083,15 @@ def get_all_ranges( try: response = self._request("GET", url_path, params=params) except HTTPError as err: - logger.info(err.response.text) + logger.error(err.response.text) return {} - json_response = response.json() - logger.debug(json_response()) + try: + json_response = response.json() + logger.debug(json_response) + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text + data = defaultdict(lambda: defaultdict(list)) for prefix_range in json_response: str_range = f"{prefix_range['start_addr']}-{prefix_range['end_addr']}" @@ -905,6 +1104,7 @@ def get_all_subnets(self, prefix: str = None, ipv6: bool = False, network_view: Args: prefix (str): Network prefix - '10.220.0.0/22' ipv6 (bool): Whether or not the call should be made for IPv6 subnets. + network_view (str): Name of the network view, e.g. 
'dev' Returns: (list) of record dicts @@ -946,10 +1146,14 @@ def get_all_subnets(self, prefix: str = None, ipv6: bool = False, network_view: try: response = self._request("GET", url_path, params=params) except HTTPError as err: - logger.info(err.response.text) + logger.error(err.response.text) return [] - json_response = response.json() - logger.debug(json_response()) + try: + logger.debug(response.json()) + json_response = response.json() + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text # TODO: What does the below code do? We don't return any of this. @progala if not ipv6: ranges = self.get_all_ranges(prefix=prefix, network_view=network_view) @@ -963,7 +1167,10 @@ def get_all_subnets(self, prefix: str = None, ipv6: bool = False, network_view: return json_response def get_authoritative_zone(self, network_view: Optional[str] = None): - """Get authoritative zone to check if fqdn exists. + """Get authoritative zones to check if FQDN exists. + + Args: + network_view (str): Name of the network view, e.g. 'dev' Returns: (list) of zone dicts @@ -985,15 +1192,59 @@ def get_authoritative_zone(self, network_view: Optional[str] = None): url_path = "zone_auth" params = {"_return_as_object": 1} if network_view: - dns_view = self.get_default_dns_view_for_network_view(network_view) + dns_view = self.get_dns_view_for_network_view(network_view) params["view"] = dns_view response = self._request("GET", url_path, params=params) - logger.debug(response.json()) - return response.json().get("result") + try: + logger.debug(response.json()) + results = response.json().get("result") + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text + + def get_authoritative_zones_for_dns_view(self, view: str): + """Get authoritative zone list for given DNS view. + + Returns: + (list) of zone dicts + view (str): Name of the DNS view, e.g. 'default.dev' + + Return Response: + [ + { + "_ref": "zone_auth/ZG5zLnpvbmUkLl9kZWZhdWx0LnRlc3Qtc2l0ZS1pbm5hdXRvYm90:test-site-innautobot/default", + "fqdn": "test-site-innautobot", + "view": "default" + }, + { + "_ref": "zone_auth/ZG5zLnpvbmUkLl9kZWZhdWx0LnRlc3Qtc2l0ZQ:test-site/default", + "fqdn": "test-site", + "view": "default" + }, + ] + """ + url_path = "zone_auth" + params = { + "view": view, + "_return_fields": "fqdn,view", + "_return_as_object": 1, + } + response = self._request("GET", path=url_path, params=params) + try: + logger.debug(response.json()) + results = response.json().get("result") + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text def _find_network_reference(self, network, network_view: Optional[str] = None): """Find the reference for the given network. + Args: + network_view (str): Name of the network view, e.g. 'dev' + Returns: Dict: Dictionary of _ref and name @@ -1011,12 +1262,20 @@ def _find_network_reference(self, network, network_view: Optional[str] = None): if network_view: params["network_view"] = network_view response = self._request("GET", url_path, params=params) - logger.debug(response.json()) - return response.json() + try: + logger.debug(response.json()) + results = response.json() + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text def find_next_available_ip(self, network, network_view: Optional[str] = None): """Find the next available ip address for a given network. + Args: + network_view (str): Name of the network view, e.g. 
'dev' + Returns: Dict: @@ -1032,8 +1291,11 @@ def find_next_available_ip(self, network, network_view: Optional[str] = None): try: network_ref_id = self._find_network_reference(network=network, network_view=network_view) except Exception as err: # pylint: disable=broad-except - # TODO: Add network-view to the error @progala - logger.warning("Network reference not found for %s: %s", network, err) + if network_view: + err_msg = f"Network reference not found for {network}-{network_view}: {str(err)}" + else: + err_msg = f"Network reference not found for {network}: {str(err)}" + logger.warning(err_msg) return next_ip_avail if network_ref_id and isinstance(network_ref_id, list): @@ -1050,6 +1312,9 @@ def find_next_available_ip(self, network, network_view: Optional[str] = None): def reserve_fixed_address(self, network, mac_address, network_view: Optional[str] = None): """Reserve the next available ip address for a given network range. + Args: + network_view (str): Name of the network view, e.g. 'dev' + Returns: Str: The IP Address that was reserved @@ -1065,13 +1330,21 @@ def reserve_fixed_address(self, network, mac_address, network_view: Optional[str if network_view: payload["network_view"] = network_view response = self._request("POST", url_path, params=params, json=payload) - logger.debug(response.json()) - return response.json().get("result").get("ipv4addr") + try: + logger.debug(response.json()) + results = response.json().get("result").get("ipv4addr") + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text return False def create_fixed_address(self, ip_address, mac_address, network_view: Optional[str] = None): """Create a fixed ip address within Infoblox. + Args: + network_view (str): Name of the network view, e.g. 'dev' + Returns: Str: The IP Address that was reserved @@ -1084,8 +1357,13 @@ def create_fixed_address(self, ip_address, mac_address, network_view: Optional[s if network_view: payload["network_view"] = network_view response = self._request("POST", url_path, params=params, json=payload) - logger.debug(response.json()) - return response.json().get("result").get("ipv4addr") + try: + logger.debug(response.json()) + results = response.json().get("result").get("ipv4addr") + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text def create_host_record(self, fqdn, ip_address, network_view: Optional[str] = None): """Create a host record for a given FQDN. @@ -1093,6 +1371,9 @@ def create_host_record(self, fqdn, ip_address, network_view: Optional[str] = Non Please note: This API call with work only for host records that do not have an associated a record. If an a record already exists, this will return a 400 error. + Args: + network_view (str): Name of the network view, e.g. 
'dev' + Returns: Dict: Dictionary of _ref and name @@ -1111,30 +1392,71 @@ def create_host_record(self, fqdn, ip_address, network_view: Optional[str] = Non try: response = self._request("POST", url_path, params=params, json=payload) except HTTPError as err: - logger.info("Host record error: %s", err.response.text) + logger.error("Host record error: %s", err.response.text) return [] - logger.debug("Infoblox host record created: %s", response.json()) - return response.json().get("result") + try: + logger.debug("Infoblox host record created: %s", response.json()) + results = response.json().get("result") + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text + + def update_host_record(self, ref, data): + """Update a host record for a given FQDN. + + Please note: This API call should only be used for host records that do not have an associated A record. + + Args: + ref (str): Reference to Host record + + Returns: + Dict: Dictionary of _ref and name + + Return Response: + { + + "_ref": "record:host/ZG5zLmhvc3QkLjEuY29tLmluZm9ibG94Lmhvc3Q:host.infoblox.com/default.test", + "name": "host.infoblox.com", + } + """ + params = {} + try: + response = self._request("PUT", path=ref, params=params, json=data) + except HTTPError as err: + logger.error("Could not update Host address: %s for ref %s", err.response.text, ref) + return None + try: + logger.debug("Infoblox host record updated: %s", response.json()) + results = response.json() + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text def delete_host_record(self, ip_address, network_view: Optional[str] = None): - """Delete provided IP Address from Infoblox.""" + """Delete provided IP Address from Infoblox. + + Args: + network_view (str): Name of the network view, e.g. 'dev' + """ resource = self.get_host_record_by_ip(ip_address=ip_address, network_view=network_view) - # TODO: Add network view to messages @progala if resource: ref = resource[0]["_ref"] self._delete(ref) - response = {"deleted": ip_address} + response = {"deleted": ip_address, "network_view": network_view} else: - response = {"error": f"Did not find {ip_address}"} + response = {"error": f"Did not find IP address {ip_address} in network view {network_view}"} logger.debug(response) return response def create_ptr_record(self, fqdn, ip_address, network_view: Optional[str] = None): - """Create an PTR record for a given FQDN. + """Create a PTR record for a given FQDN. Args: fqdn (str): Fully Qualified Domain Name ip_address (str): Host IP address + network_view (str): Name of the network view, e.g. 
'dev' Returns: Dict: Dictionary of _ref and name @@ -1152,14 +1474,18 @@ def create_ptr_record(self, fqdn, ip_address, network_view: Optional[str] = None reverse_host = str(reversename.from_address(ip_address))[ 0:-1 ] # infoblox does not accept the top most domain '.', so we strip it - payload = {"name": reverse_host, "ptrdname": fqdn} # , "ipv4addr": ip_address} + payload = {"name": reverse_host, "ptrdname": fqdn, "ipv4addr": ip_address} if network_view: - dns_view = self.get_default_dns_view_for_network_view(network_view) + dns_view = self.get_dns_view_for_network_view(network_view) payload["view"] = dns_view response = self._request("POST", url_path, params=params, json=payload) - # TODO: Add network view/dns view to the message @progala - logger.info("Infoblox PTR record created: %s", response.json()) - return response.json().get("result") + try: + logger.debug("Infoblox PTR record created: %s", response.json()) + results = response.json().get("result") + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text def search_ipv4_address(self, ip_address): """Find if IP address is in IPAM. Returns empty list if address does not exist. @@ -1192,8 +1518,13 @@ def search_ipv4_address(self, ip_address): url_path = "search" params = {"address": ip_address, "_return_as_object": 1} response = self._request("GET", url_path, params=params) - logger.debug(response.json()) - return response.json().get("result") + try: + logger.debug(response.json()) + results = response.json().get("result") + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text def get_vlan_view(self, name="Nautobot"): """Retrieve a specific vlanview. @@ -1217,8 +1548,13 @@ def get_vlan_view(self, name="Nautobot"): url_path = "vlanview" params = {"name": name} response = self._request("GET", path=url_path, params=params) - logger.debug(response.json()) - return response.json() + try: + logger.debug(response.json()) + results = response.json() + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text def create_vlan_view(self, name, start_vid=1, end_vid=4094): """Create a vlan view. @@ -1237,8 +1573,13 @@ def create_vlan_view(self, name, start_vid=1, end_vid=4094): url_path = "vlanview" params = {"name": name, "start_vlan_id": start_vid, "end_vlan_id": end_vid} response = self._request("POST", path=url_path, params=params) - logger.debug(response.json()) - return response.json() + try: + logger.debug(response.json()) + results = response.json() + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text def get_vlanviews(self): """Retrieve all VLANViews from Infoblox. @@ -1267,8 +1608,13 @@ def get_vlanviews(self): url_path = "vlanview" params = {"_return_fields": "name,comment,start_vlan_id,end_vlan_id,extattrs"} response = self._request("GET", url_path, params=params) - logger.debug(response.json()) - return response.json() + try: + logger.debug(response.json()) + results = response.json() + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text def get_vlans(self): """Retrieve all VLANs from Infoblox. 
@@ -1316,11 +1662,16 @@ def get_vlans(self): ] ) response = self._request("POST", url_path, data=payload) - logger.debug(response.json()) + try: + logger.debug(response.json()) + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text + if len(response.json()): return response.json()[0] - else: - return [] + + return [] def create_vlan(self, vlan_id, vlan_name, vlan_view): """Create a VLAN in Infoblox. @@ -1347,8 +1698,13 @@ def create_vlan(self, vlan_id, vlan_name, vlan_view): params = {} payload = {"parent": parent, "id": vlan_id, "name": vlan_name} response = self._request("POST", url_path, params=params, json=payload) - logger.debug(response.json()) - return response.json() + try: + logger.debug(response.json()) + results = response.json() + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text @staticmethod def get_ipaddr_status(ip_record: dict) -> str: @@ -1376,20 +1732,93 @@ def _find_matching_resources(self, resource, **params): _ref: fixedaddress/ZG5zLmZpeGVkX2FkZHJlc3MkMTAuMjIwLjAuMy4wLi4:10.220.0.3/default """ response = self._request("GET", resource, params=params) - logger.debug(response.json()) - return response.json() + try: + logger.debug(response.json()) + results = response.json() + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text + + def update_ptr_record(self, ref, data): # pylint: disable=inconsistent-return-statements + """Update a PTR Record. + + Args: + ref (str): Reference to PTR record + data (dict): keyword args used to update the object e.g. comment="updateme" + + Returns: + Dict: Dictionary of _ref and name + + Return Response: + { + "_ref": "record:ptr/ZG5zLmJpbmRfcHRyJC5fZGVmYXVsdC5hcnBhLmluLWFkZHIuMTAuMjIzLjkuOTYucjQudGVzdA:96.9.223.10.in-addr.arpa/default", + "ipv4addr": "10.223.9.96", + "name": "96.9.223.10.in-addr.arpa", + "ptrdname": "r4.test" + } + """ + params = {} + try: + logger.debug(data) + response = self._request("PUT", path=ref, params=params, json=data) + except HTTPError as err: + logger.error("Could not update PTR address: %s for ref %s", err.response.text, ref) + return None + try: + logger.debug("Infoblox PTR Address updated: %s", response.json()) + results = response.json() + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text + + def update_a_record(self, ref, data): # pylint: disable=inconsistent-return-statements + """Update an A record. + + Args: + ref (str): Reference to A record + data (dict): keyword args used to update the object e.g. 
comment="updateme" + + Returns: + Dict: Dictionary of _ref and name + + Return Response: + { + "_ref": "record:a/ZG5zLmJpbmRfYSQuX2RlZmF1bHQudGVzdCx0ZXN0ZGV2aWNlMSwxMC4yMjAuMC4xMDE:testdevice1.test/default", + "ipv4addr": "10.220.0.101", + "name": "testdevice1.test", + "view": "default" + } + """ + params = {} + try: + logger.debug(data) + response = self._request("PUT", path=ref, params=params, json=data) + except HTTPError as err: + logger.error("Could not update DNS address: %s for ref %s", err.response.text, ref) + return None + try: + logger.debug("Infoblox DNS Address updated: %s", response.json()) + results = response.json() + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text - # TODO: See if we should accept params dictionary and extended to both host record and fixed address - # TODO: This doesn't work very well at all currently @progala # Perhaps make multiple searches, or go through types returned by the search def update_ipaddress( - self, ip_address, network_view: Optional[str] = None, **data + self, + ip_address, + data, + network_view: Optional[str] = None, ): # pylint: disable=inconsistent-return-statements """Update a IP Address object with a given ip address. Args: ip_address (str): Valid IP address data (dict): keyword args used to update the object e.g. comment="updateme" + network_view (str): Name of the network view, e.g. 'dev' Returns: Dict: Dictionary of _ref and name @@ -1404,44 +1833,37 @@ def update_ipaddress( # resources.extend(self._find_matching_resources("search", search_string=ip_address, objtype="record:host")) resources = self._find_matching_resources("search", address=ip_address) if not resources: - return - found_ipv4_ref = None + return None + ipv4_ref = None # We can get multiple resources of varying types. 
The name of resource is embedded in the `_ref` attr resource_types = ["fixedaddress"] - if network_view: - for resource in resources: - ref = resource.get("_ref") - if ref.split("/")[0] not in resource_types: - continue - if resource.get("network_view") != network_view: - continue - if resource.get("ipv4addr") != ip_address: - continue - found_ipv4_ref = ref - break - else: - for resource in resources: - ref = resource.get("_ref") - if ref.split("/")[0] not in resource_types: - continue - if resource.get("ipv4addr") != ip_address: - continue - found_ipv4_ref = ref - break - - if not found_ipv4_ref: - return - # params = {"_return_fields": "ipv4addr", "_return_as_object": 1} + for resource in resources: + ref = resource.get("_ref") + if ref.split("/")[0] not in resource_types: + continue + if network_view and resource.get("network_view") != network_view: + continue + if resource.get("ipv4addr") != ip_address: + continue + ipv4_ref = ref + break + + if not ipv4_ref: + return None params = {} try: logger.debug(data) - response = self._request("PUT", path=found_ipv4_ref, params=params, json=data) + response = self._request("PUT", path=ipv4_ref, params=params, json=data) except HTTPError as err: - logger.info("Resource: %s", found_ipv4_ref) - logger.info("Could not update IP address: %s", err.response.text) - return - logger.info("Infoblox IP Address updated: %s", response.json()) - return response.json() + logger.error("Could not update IP address: %s for ref %s", err.response.text, ipv4_ref) + return None + try: + logger.debug("Infoblox IP Address updated: %s", response.json()) + results = response.json() + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text def get_tree_from_container(self, root_container: str, network_view: Optional[str] = None) -> list: """Returns the list of all child containers from a given root container.""" @@ -1468,23 +1890,13 @@ def get_tree_from_container(self, root_container: str, network_view: Optional[st return flattened_tree - def remove_duplicates(self, network_list: list) -> list: - """Removes duplicate networks from a list of networks.""" - seen_networks = set() - new_list = [] - for network in network_list: - if network["network"] not in seen_networks: - new_list.append(network) - seen_networks.add(network["network"]) - - return new_list - def get_network_containers(self, prefix: str = "", ipv6: bool = False, network_view: Optional[str] = None): """Get all Network Containers. Args: prefix (str): Specific prefix (192.168.0.1/24) ipv6 (bool): Whether the call should be made for IPv6 network containers. + network_view (str): Name of the network view, e.g. 'dev' Returns: (list) of record dicts @@ -1516,9 +1928,12 @@ def get_network_containers(self, prefix: str = "", ipv6: bool = False, network_v if prefix: params.update({"network": prefix}) response = self._request("GET", url_path, params=params) - response = response.json() - logger.debug(response) - results = response.get("result", []) + try: + logger.debug(response.json()) + results = response.json().get("result", []) + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text for res in results: res.update({"status": "container"}) return results @@ -1526,6 +1941,9 @@ def get_network_containers(self, prefix: str = "", ipv6: bool = False, network_v def get_child_network_containers(self, prefix: str, network_view: Optional[str] = None): """Get all Child Network Containers for Container. 
+ Args: + network_view (str): Name of the network view, e.g. 'dev' + Returns: (list) of record dicts @@ -1559,9 +1977,12 @@ def get_child_network_containers(self, prefix: str, network_view: Optional[str] params.update({"network_view": network_view}) params.update({"network_container": prefix}) response = self._request("GET", url_path, params=params) - response = response.json() - logger.debug(response) - results = response.get("result", []) + try: + logger.debug(response.json()) + results = response.json().get("result", []) + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text for res in results: res.update({"status": "container"}) return results @@ -1571,6 +1992,7 @@ def get_child_subnets_from_container(self, prefix: str, network_view: Optional[s Args: prefix (str): Network prefix - '10.220.0.0/22' + network_view (str): Name of the network view, e.g. 'dev' Returns: (list) of record dicts @@ -1610,11 +2032,15 @@ def get_child_subnets_from_container(self, prefix: str, network_view: Optional[s try: response = self._request("GET", url_path, params=params) except HTTPError as err: - logger.info(err.response.text) + logger.error(err.response.text) return [] - response = response.json() - logger.debug(response) - return response.get("result") + try: + logger.debug(response.json()) + results = response.json().get("result") + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text def get_network_views(self): """Get all network views. @@ -1666,10 +2092,15 @@ def get_network_views(self): try: response = self._request("GET", url_path, params=params) except HTTPError as err: - logger.info(err.response.text) + logger.error(err.response.text) return [] - logger.debug(response.json()) - return response.json() + try: + logger.debug(response.json()) + results = response.json() + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text def get_network_view(self, name: str): """Get network view object for given name. @@ -1703,10 +2134,38 @@ def get_network_view(self, name: str): try: response = self._request("GET", path=url_path, params=params) except HTTPError as err: - logger.info(err.response.text) + logger.error(err.response.text) return [] - logger.debug(response.json()) - return response.json() + try: + logger.debug(response.json()) + results = response.json() + return results + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text + + def get_dns_view_for_network_view(self, network_view: str): + """Get DNS view for given network view. + + Use DNS view defined in the Infoblox Config. If the mapping is not defined retrieve the default DNS View. + + Args: + network_view (str): Name of the network view - 'dev' + + Returns: + (str) name of the DNS view + """ + if network_view in self.network_view_to_dns_map: + return self.network_view_to_dns_map[network_view] + + dns_view = self.get_default_dns_view_for_network_view(network_view) + # Cache the value to avoid excessive API queries + if dns_view: + self.network_view_to_dns_map[network_view] = dns_view + else: + logger.warning(f"Cannot find DNS View for Network View {network_view}.") + + return dns_view def get_default_dns_view_for_network_view(self, network_view: str): """Get default (first on the list) DNS view for given network view. 
@@ -1715,11 +2174,10 @@ def get_default_dns_view_for_network_view(self, network_view: str): network_view (str): Name of the network view - 'dev' Returns: - (str) name of the default dns view + (str) name of the default DNS view """ _network_view = self.get_network_view(network_view) - logger.info(_network_view) - if _network_view: + if _network_view and "associated_dns_views" in _network_view[0]: return _network_view[0]["associated_dns_views"][0] - else: - return None + logger.debug(_network_view) + return None From 4b897c8a27401915bd03cf9c8c7066b7dd41f1ca Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 20 May 2024 14:20:59 +0100 Subject: [PATCH 053/229] Remove legacy config. --- .../integrations/infoblox/constant.py | 33 ------------------- 1 file changed, 33 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/constant.py b/nautobot_ssot/integrations/infoblox/constant.py index 01ed95463..a1e8b530a 100644 --- a/nautobot_ssot/integrations/infoblox/constant.py +++ b/nautobot_ssot/integrations/infoblox/constant.py @@ -1,35 +1,2 @@ """Constants for use with the Infoblox SSoT app.""" - -from django.conf import settings - - -def _read_app_config(): - """Provides backward compatible object after integrating into `nautobot_ssot` App.""" - config = settings.PLUGINS_CONFIG["nautobot_ssot"] - - return { - "NAUTOBOT_INFOBLOX_URL": config.get("infoblox_url"), - "NAUTOBOT_INFOBLOX_USERNAME": config.get("infoblox_username"), - "NAUTOBOT_INFOBLOX_PASSWORD": config.get("infoblox_password"), - "NAUTOBOT_INFOBLOX_VERIFY_SSL": config.get("infoblox_verify_ssl"), - "NAUTOBOT_INFOBLOX_WAPI_VERSION": config.get("infoblox_wapi_version"), - "NAUTOBOT_INFOBLOX_NETWORK_VIEW": config.get("infoblox_network_view"), - "enable_sync_to_infoblox": config.get("infoblox_enable_sync_to_infoblox"), - "enable_rfc1918_network_containers": config.get("infoblox_enable_rfc1918_network_containers"), - "default_status": config.get("infoblox_default_status"), - "infoblox_import_objects": { - "vlan_views": config.get("infoblox_import_objects_vlan_views"), - "vlans": config.get("infoblox_import_objects_vlans"), - "subnets": config.get("infoblox_import_objects_subnets"), - "subnets_ipv6": config.get("infoblox_import_objects_subnets_ipv6"), - "ip_addresses": config.get("infoblox_import_objects_ip_addresses"), - }, - "infoblox_import_subnets": config.get("infoblox_import_subnets"), - "infoblox_request_timeout": int(config.get("infoblox_request_timeout", 60)), - "infoblox_sync_filters": config.get("infoblox_sync_filters"), - } - - -# Import config vars from nautobot_config.py -PLUGIN_CFG = _read_app_config() TAG_COLOR = "40bfae" From ba6c2676992ad5571c9f491277319f0361ebdcfa Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 20 May 2024 14:21:49 +0100 Subject: [PATCH 054/229] Enhance record create/update logic. 
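The A, PTR, and Host record actions are now evaluated independently instead of through an if/elif chain. A condensed sketch of the resulting selection logic (paraphrasing the diff below; all three actions default to "none"):

    if config.create_a_record and inf_attrs["has_a_record"]:
        a_record_action = "update"
    if config.create_ptr_record:
        ptr_record_action = "update" if inf_attrs["has_ptr_record"] else "create"
    if config.create_host_record and inf_attrs["has_host_record"]:
        host_record_action = "update"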
--- .../infoblox/diffsync/models/infoblox.py | 24 ++++++++----------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py index 265279210..fbd6fad6e 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py @@ -103,13 +103,13 @@ def create(cls, diffsync, ids, attrs): diffsync.job.logger.warning(f"Invalid zone fqdn in DNS name `{dns_name}` for IP Address {ip_address}") return super().create(ids=ids, diffsync=diffsync, attrs=attrs) - if diffsync.config.create_a_record: + if diffsync.config.create_a_record and attrs.get("has_a_record"): diffsync.conn.create_a_record(dns_name, ip_address, network_view=network_view) # Only create PTR records if A record has been created - if diffsync.config.create_ptr_record: + if diffsync.config.create_ptr_record and attrs.get("has_ptr_record"): diffsync.conn.create_ptr_record(dns_name, ip_address, network_view=network_view) - elif diffsync.config.create_host_record: - diffsync.conn.create_host_record(dns_name, ip_address) + elif diffsync.config.create_host_record and attrs.get("has_host_record"): + diffsync.conn.create_host_record(dns_name, ip_address, network_view=network_view) return super().create(ids=ids, diffsync=diffsync, attrs=attrs) def update(self, attrs): # pylint: disable=too-many-branches @@ -141,9 +141,7 @@ def update(self, attrs): # pylint: disable=too-many-branches # Only allows creating/updating Host record if IP Address doesn't have a corresponding A or PTR record. incompatible_record_types = False if attrs.get("has_a_record", False) and self.diffsync.config.create_a_record and inf_attrs["has_host_record"]: - incomp_msg = ( - f"Cannot create/update A Record for IP Address, {ip_address}. It already has an existing Host Record." - ) + incomp_msg = f"Cannot update A Record for IP Address, {ip_address}. It already has an existing Host Record." incompatible_record_types = True elif ( attrs.get("has_ptr_record", False) @@ -159,9 +157,7 @@ def update(self, attrs): # pylint: disable=too-many-branches and self.diffsync.config.create_host_record and inf_attrs["has_a_record"] ): - incomp_msg = ( - f"Cannot create/update Host Record for IP Address, {ip_address}. It already has an existing A Record" - ) + incomp_msg = f"Cannot update Host Record for IP Address, {ip_address}. It already has an existing A Record." incompatible_record_types = True elif ( attrs.get("has_host_record", False) @@ -169,7 +165,7 @@ def update(self, attrs): # pylint: disable=too-many-branches and inf_attrs["has_ptr_record"] ): incomp_msg = ( - f"Cannot create/update Host Record for IP Address, {ip_address}. It already has an existing PTR Record" + f"Cannot update Host Record for IP Address, {ip_address}. It already has an existing PTR Record." 
) incompatible_record_types = True @@ -182,9 +178,9 @@ def update(self, attrs): # pylint: disable=too-many-branches host_record_action = "none" if self.diffsync.config.create_a_record and inf_attrs["has_a_record"]: a_record_action = "update" - if self.diffsync.config.create_ptr_record: - ptr_record_action = "update" if inf_attrs["has_ptr_record"] else "create" - elif self.diffsync.config.create_host_record and inf_attrs["has_host_record"]: + if self.diffsync.config.create_ptr_record: + ptr_record_action = "update" if inf_attrs["has_ptr_record"] else "create" + if self.diffsync.config.create_host_record and inf_attrs["has_host_record"]: host_record_action = "update" # IP Address in Infoblox is not a plain IP Address like in Nautobot. From dd4b52454e340b2b29bbb3986c0a0dd292f9a1e8 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 20 May 2024 14:22:36 +0100 Subject: [PATCH 055/229] - Allow excluding attributes from sync. - Add DNS name validation. - Update network view to namespace mapping logic. --- .../integrations/infoblox/utils/diffsync.py | 75 +++++++++++++++++-- 1 file changed, 67 insertions(+), 8 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/utils/diffsync.py b/nautobot_ssot/integrations/infoblox/utils/diffsync.py index 2615e1d23..c2f70b7b2 100644 --- a/nautobot_ssot/integrations/infoblox/utils/diffsync.py +++ b/nautobot_ssot/integrations/infoblox/utils/diffsync.py @@ -1,5 +1,5 @@ """Utilities for DiffSync related stuff.""" - +from typing import Optional from django.contrib.contenttypes.models import ContentType from django.utils.text import slugify from nautobot.ipam.models import IPAddress, Prefix, VLAN @@ -47,7 +47,7 @@ def nautobot_vlan_status(status: str) -> str: return statuses[status] -def get_ext_attr_dict(extattrs: dict): +def get_ext_attr_dict(extattrs: dict, excluded_attrs: Optional[list] = None): """Rebuild Extensibility Attributes dict into standard k/v pattern. The standard extattrs dict pattern is to have the dict look like so: @@ -56,12 +56,17 @@ def get_ext_attr_dict(extattrs: dict): Args: extattrs (dict): Extensibility Attributes dict for object. + excluded_attrs (list): List of Extensibility Attributes to exclude. Returns: dict: Standardized dictionary for Extensibility Attributes. """ + if excluded_attrs is None: + excluded_attrs = [] fixed_dict = {} for key, value in extattrs.items(): + if key in excluded_attrs: + continue fixed_dict[slugify(key).replace("-", "_")] = value["value"] return fixed_dict @@ -81,7 +86,25 @@ def build_vlan_map(vlans: list): return vlan_map -def get_default_custom_fields(cf_contenttype: ContentType) -> dict: +def get_valid_custom_fields(cfs: dict, excluded_cfs: list): + """Remove custom fields that are on the excluded list. + + Args: + cfs: custom fields + excluded_cfs: list of excluded custom fields + """ + default_excluded_cfs = ["ssot_synced_to_infoblox", "dhcp_ranges"] + excluded_cfs.extend(default_excluded_cfs) + valid_cfs = {} + for cf_name, val in cfs.items(): + if cf_name in excluded_cfs: + continue + valid_cfs[cf_name] = val + + return valid_cfs + + +def get_default_custom_fields(cf_contenttype: ContentType, excluded_cfs: Optional[list] = None) -> dict: """Get default Custom Fields for specific ContentType. Args: @@ -90,16 +113,23 @@ def get_default_custom_fields(cf_contenttype: ContentType) -> dict: Returns: dict: Dictionary of all Custom Fields for a specific object type. 
""" + if excluded_cfs is None: + excluded_cfs = [] customfields = CustomField.objects.filter(content_types=cf_contenttype) + # These cfs are always excluded + default_excluded_cfs = ["ssot_synced_to_infoblox", "dhcp_ranges"] + # User defined excluded cfs + excluded_cfs.extend(default_excluded_cfs) default_cfs = {} for customfield in customfields: - if customfield.key != "ssot_synced_to_infoblox": - if customfield.key not in default_cfs: - default_cfs[customfield.key] = None + if customfield.key in excluded_cfs: + continue + if customfield.key not in default_cfs: + default_cfs[customfield.key] = None return default_cfs -def map_network_view_to_namespace(network_view: str) -> str: +def map_network_view_to_namespace(value: str, direction: str) -> str: """Remaps Infoblox Network View name to Nautobot Namespace name. This matters most for mapping default "default" Network View to default Namespace "Global". @@ -113,4 +143,33 @@ def map_network_view_to_namespace(network_view: str) -> str: network_view_to_namespace = { "default": "Global", } - return network_view_to_namespace.get(network_view, network_view) + namespace_to_network_view = {ns: nv for nv, ns in network_view_to_namespace.items()} + + if direction == "nv_to_ns": + return network_view_to_namespace.get(value, value) + if direction == "ns_to_nv": + return namespace_to_network_view.get(value, value) + + return None + + +def validate_dns_name(infoblox_client: object, dns_name: str, network_view: str) -> bool: + """Checks if DNS name matches any of the zones found in Infoblox. + + Args: + (object) infoblox_conn: Infoblox API client + (str) dns_name: DNS name + (str) network_view: network view name + + Returns: + (bool) + """ + dns_view = infoblox_client.get_dns_view_for_network_view(network_view=network_view) + zones = infoblox_client.get_authoritative_zones_for_dns_view(view=dns_view) + dns_name_valid = False + for zone in zones: + if zone["fqdn"] in dns_name: + dns_name_valid = True + break + + return dns_name_valid From 1981c6953f85839b6c38fbaf4bdf46e464dca40c Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 20 May 2024 14:25:19 +0100 Subject: [PATCH 056/229] Enforce min Nautobot version for Infoblox integration. --- nautobot_ssot/__init__.py | 36 +++++++++++++++++++++++------------- 1 file changed, 23 insertions(+), 13 deletions(-) diff --git a/nautobot_ssot/__init__.py b/nautobot_ssot/__init__.py index b1d554c5a..d0b54911d 100644 --- a/nautobot_ssot/__init__.py +++ b/nautobot_ssot/__init__.py @@ -3,9 +3,11 @@ import os from importlib import metadata + from django.conf import settings from nautobot.extras.plugins import NautobotAppConfig from nautobot.core.settings_funcs import is_truthy +import packaging from nautobot_ssot.integrations.utils import each_enabled_integration_module from nautobot_ssot.utils import logger @@ -22,6 +24,25 @@ "nautobot_ssot_servicenow", ] +_MIN_NAUTOBOT_VERSION = { + "nautobot_ssot_infoblox": "2.1", +} + + +def _check_min_nautobot_version_met(): + incompatible_apps_msg = [] + nautobot_version = metadata.version("nautobot") + for app, nb_ver in _MIN_NAUTOBOT_VERSION.items(): + if packaging.version.parse(nb_ver) > packaging.version.parse(nautobot_version): + incompatible_apps_msg.append(f"The `{app}` requires Nautobot version {nb_ver} or higher.\n") + print(incompatible_apps_msg) + + if incompatible_apps_msg: + raise RuntimeError( + f"This version of Nautobot ({nautobot_version}) does not meet minimum requirements for the following apps:\n {''.join(incompatible_apps_msg)}." 
+ "See: https://docs.nautobot.com/projects/ssot/en/latest/admin/upgrade/#potential-apps-conflicts" + ) + def _check_for_conflicting_apps(): intersection = set(_CONFLICTING_APP_NAMES).intersection(set(settings.PLUGINS)) @@ -35,6 +56,8 @@ def _check_for_conflicting_apps(): if not is_truthy(os.getenv("NAUTOBOT_SSOT_ALLOW_CONFLICTING_APPS", "False")): _check_for_conflicting_apps() +_check_min_nautobot_version_met() + class NautobotSSOTAppConfig(NautobotAppConfig): """App configuration for the nautobot_ssot app.""" @@ -97,19 +120,6 @@ class NautobotSSOTAppConfig(NautobotAppConfig): "enable_ipfabric": False, "enable_servicenow": False, "hide_example_jobs": True, - "infoblox_default_status": "", - "infoblox_enable_rfc1918_network_containers": False, - "infoblox_enable_sync_to_infoblox": False, - "infoblox_import_objects_ip_addresses": False, - "infoblox_import_objects_subnets": False, - "infoblox_import_objects_vlan_views": False, - "infoblox_import_objects_vlans": False, - "infoblox_import_subnets": [], - "infoblox_password": "", - "infoblox_url": "", - "infoblox_username": "", - "infoblox_verify_ssl": True, - "infoblox_wapi_version": "", "ipfabric_api_token": "", "ipfabric_host": "", "ipfabric_ssl_verify": True, From 41c8639b2407c538e01ef56af552333f9f0f4b1b Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 20 May 2024 14:25:35 +0100 Subject: [PATCH 057/229] Linting --- nautobot_ssot/models.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/nautobot_ssot/models.py b/nautobot_ssot/models.py index 073b58f46..8b28c34bc 100644 --- a/nautobot_ssot/models.py +++ b/nautobot_ssot/models.py @@ -208,7 +208,9 @@ def get_status_class(self): }.get(self.status) -class SSOTConfig(models.Model): +class SSOTConfig(models.Model): # pylint: disable=nb-incorrect-base-class + """Non-db model providing user permission constraints.""" + class Meta: managed = False default_permissions = ("view",) From 0adde51fc998b955e3c5b6d0fa6f52960b16912d Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 20 May 2024 14:26:12 +0100 Subject: [PATCH 058/229] Add template for displaying integration configs. --- .../templates/nautobot_ssot/ssot_configs.html | 44 +++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 nautobot_ssot/templates/nautobot_ssot/ssot_configs.html diff --git a/nautobot_ssot/templates/nautobot_ssot/ssot_configs.html b/nautobot_ssot/templates/nautobot_ssot/ssot_configs.html new file mode 100644 index 000000000..db6c990fd --- /dev/null +++ b/nautobot_ssot/templates/nautobot_ssot/ssot_configs.html @@ -0,0 +1,44 @@ +{% extends 'base.html' %} +{% load helpers %} + +{% block header %} +
+    <div class="row noprint">
+        <div class="col-sm-8 col-md-9">
+            <h1>{% block title %}SSOT Configs{% endblock %}</h1>
+        </div>
+    </div>
+{% endblock header %}
+
+{% block content %}
+<div class="row">
+    <div class="col-md-6">
+        <div class="panel panel-default">
+            <div class="panel-heading">
+                <strong>SSOT Integration Configs</strong>
+            </div>
+            <table class="table table-hover panel-body attr-table">
+                {% if perms.nautobot_ssot.view_ssotinfobloxconfig and "infoblox" in enabled_integrations %}
+                <tr>
+                    <td>Infoblox</td>
+                    <td>
+                        <a href="{% url 'plugins:nautobot_ssot:ssotinfobloxconfig_list' %}">Infoblox Configuration List</a>
+                    </td>
+                </tr>
+                {% endif %}
+                {% if perms.nautobot_ssot.view_ssotservicenowconfig and "servicenow" in enabled_integrations %}
+                <tr>
+                    <td>ServiceNow</td>
+                    <td>
+                        <a href="{% url 'plugins:nautobot_ssot:servicenow_config' %}">ServiceNow Configuration Instance</a>
+                    </td>
+                </tr>
+                {% endif %}
+            </table>
+        </div>
+    </div>
+</div>
    +{% endblock content %} \ No newline at end of file From b07031f35fc07c5703e20f70c00d6dd0dfd62545 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 20 May 2024 15:17:08 +0100 Subject: [PATCH 059/229] Update tests. --- .../tests/infoblox/test_infoblox_adapter.py | 1 - .../tests/infoblox/test_nautobot_adapter.py | 41 +++++++++++++++++-- 2 files changed, 37 insertions(+), 5 deletions(-) diff --git a/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py b/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py index 665df89f8..4bbae62cd 100644 --- a/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py +++ b/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py @@ -165,7 +165,6 @@ def test_load_prefixes_with_infoblox_import_subnets( }, ] self.infoblox_adapter.conn.get_all_subnets.side_effect = [one_nine_two_network] - self.infoblox_adapter.conn.remove_duplicates.side_effect = [ten_network + one_nine_two_network, ten_container] sync_filters = [{"network_view": "default", "prefixes_ipv4": ["10.0.0.0/8", "192.168.0.0/16"]}] self.infoblox_adapter.load_prefixes(include_ipv4=True, include_ipv6=False, sync_filters=sync_filters) self.infoblox_adapter.conn.get_tree_from_container.assert_has_calls( diff --git a/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py b/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py index 218b550f6..9d7ed4c9b 100644 --- a/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py +++ b/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py @@ -3,8 +3,8 @@ from django.contrib.contenttypes.models import ContentType from django.test import TestCase -from nautobot.extras.models import RelationshipAssociation, Status -from nautobot.ipam.models import Prefix, VLAN, VLANGroup +from nautobot.extras.models import Relationship, RelationshipAssociation, Status +from nautobot.ipam.models import Namespace, Prefix, VLAN, VLANGroup from nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot import NautobotAdapter from nautobot_ssot.tests.infoblox.fixtures_infoblox import create_default_infoblox_config, create_prefix_relationship @@ -47,6 +47,7 @@ def setUp(self): status=active_status, vlan_group=vlan_group2, ) + namespace_dev, _ = Namespace.objects.get_or_create(name="dev") prefix1 = Prefix.objects.create( prefix="10.0.0.0/24", status=active_status, @@ -66,6 +67,18 @@ def setUp(self): status=active_status, type="Network", ) + Prefix.objects.create( + prefix="10.0.1.0/24", + status=active_status, + type="Network", + namespace=namespace_dev, + ) + Prefix.objects.create( + prefix="10.2.1.0/24", + status=active_status, + type="Network", + namespace=namespace_dev, + ) self.config = create_default_infoblox_config() self.sync_filters = self.config.infoblox_sync_filters self.nb_adapter = NautobotAdapter(config=self.config) @@ -83,8 +96,28 @@ def test_load_vlans_does_not_load_ungrouped_vlans(self): def test_load_prefixes_loads_prefixes(self): self.nb_adapter.load_prefixes(include_ipv4=True, include_ipv6=False, sync_filters=self.sync_filters) - actual_prefixes = {prefix.network for prefix in self.nb_adapter.get_all("prefix")} - self.assertEqual(actual_prefixes, {"10.0.0.0/24", "10.0.1.0/24"}) + actual_prefixes = {(prefix.network, prefix.namespace) for prefix in self.nb_adapter.get_all("prefix")} + self.assertEqual(actual_prefixes, {("10.0.0.0/24", "Global"), ("10.0.1.0/24", "Global")}) + + def test_load_prefixes_loads_prefixes_dev_namespace(self): + sync_filters = [{"network_view": "dev"}] + self.nb_adapter.load_prefixes(include_ipv4=True, include_ipv6=False, 
sync_filters=sync_filters) + actual_prefixes = {(prefix.network, prefix.namespace) for prefix in self.nb_adapter.get_all("prefix")} + self.assertEqual( + actual_prefixes, + {("10.0.1.0/24", "dev"), ("10.2.1.0/24", "dev")}, + ) + + def test_load_prefixes_loads_prefixes_dev_namespace_ipv4_filter(self): + sync_filters = [{"network_view": "dev", "prefixes_ipv4": ["10.0.0.0/16"]}] + self.nb_adapter.load_prefixes(include_ipv4=True, include_ipv6=False, sync_filters=sync_filters) + actual_prefixes = {(prefix.network, prefix.namespace) for prefix in self.nb_adapter.get_all("prefix")} + self.assertEqual( + actual_prefixes, + { + ("10.0.1.0/24", "dev"), + }, + ) def test_load_prefixes_loads_prefixes_and_vlan_relationship(self): self.nb_adapter.load_prefixes(include_ipv4=True, include_ipv6=False, sync_filters=self.sync_filters) From 34d15ded6fe0ce94f1ac48d0f21464367c2e3e98 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 20 May 2024 15:17:24 +0100 Subject: [PATCH 060/229] Add tests for Infoblox model methods. --- .../tests/infoblox/test_infoblox_models.py | 631 ++++++++++++++++++ 1 file changed, 631 insertions(+) create mode 100644 nautobot_ssot/tests/infoblox/test_infoblox_models.py diff --git a/nautobot_ssot/tests/infoblox/test_infoblox_models.py b/nautobot_ssot/tests/infoblox/test_infoblox_models.py new file mode 100644 index 000000000..59dd27f76 --- /dev/null +++ b/nautobot_ssot/tests/infoblox/test_infoblox_models.py @@ -0,0 +1,631 @@ +"""Unit tests for the Infoblox Diffsync models.""" +import unittest +from unittest.mock import Mock + +from django.test import TestCase + +from nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox import InfobloxAdapter +from nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot import NautobotAdapter + +from .fixtures_infoblox import create_default_infoblox_config + + +def _get_ip_address_dict(attrs): + ipaddress_dict = dict( # pylint: disable=use-dict-literal + description="Test IPAddress", + address="10.0.0.1", + status="Active", + prefix="10.0.0.0/8", + prefix_length=8, + ip_addr_type="host", + namespace="Global", + dns_name="", + ) + ipaddress_dict.update(attrs) + + return ipaddress_dict + + +class TestModelInfobloxIPAddressCreate(TestCase): + """Tests correct DNS record is created.""" + + def setUp(self): + "Test class set up." 
+ self.config = create_default_infoblox_config() + self.nb_adapter = NautobotAdapter(config=self.config) + self.nb_adapter.job = Mock() + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_create_a_record(self, mock_tag_involved_objects, mock_validate_dns_name): + """Validate A Record is created.""" + nb_ipaddress_atrs = {"has_a_record": True, "dns_name": "server1.local.test.net"} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.create_a_record = True + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + infoblox_adapter.job = Mock() + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.create_a_record.assert_called_once() + infoblox_adapter.conn.create_a_record.assert_called_with( + fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + ) + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.create_host_record.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_create_a_and_ptr_record(self, mock_tag_involved_objects, mock_validate_dns_name): + """Validate A and PTR records are created.""" + nb_ipaddress_atrs = {"has_a_record": True, "has_ptr_record": True, "dns_name": "server1.local.test.net"} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.create_host_record = False + self.config.create_a_record = True + self.config.create_ptr_record = True + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + infoblox_adapter.job = Mock() + job_logger = Mock() + infoblox_adapter.job.logger = job_logger + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.create_a_record.assert_called_once() + infoblox_adapter.conn.create_a_record.assert_called_with( + fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + ) + infoblox_adapter.conn.create_ptr_record.assert_called_once() + infoblox_adapter.conn.create_ptr_record.assert_called_with( + fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + ) + 
infoblox_adapter.conn.create_host_record.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_create_host_record(self, mock_tag_involved_objects, mock_validate_dns_name): + """Validate Host Record is created.""" + nb_ipaddress_atrs = {"has_host_record": True, "dns_name": "server1.local.test.net"} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.create_host_record = True + self.config.create_a_record = False + self.config.create_ptr_record = False + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + infoblox_adapter.job = Mock() + job_logger = Mock() + infoblox_adapter.job.logger = job_logger + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.create_host_record.assert_called_once() + infoblox_adapter.conn.create_host_record.assert_called_with( + fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + ) + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_create_no_dns_name(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure no record is created if DNS name is missing.""" + nb_ipaddress_atrs = {"has_a_record": True, "dns_name": ""} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.create_host_record = False + self.config.create_a_record = True + self.config.create_ptr_record = False + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + infoblox_adapter.job = Mock() + job_logger = Mock() + infoblox_adapter.job.logger = job_logger + self.nb_adapter.sync_to(infoblox_adapter) + log_msg = "Cannot create Infoblox record for IP Address 10.0.0.1. DNS name is not defined." 
+ job_logger.warning.assert_called_with(log_msg) + + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_not_called() + + +class TestModelInfobloxIPAddressUpdate(TestCase): + """Tests validating IP Address Update scenarios.""" + + def setUp(self): + "Test class set up." + self.config = create_default_infoblox_config() + self.nb_adapter = NautobotAdapter(config=self.config) + self.nb_adapter.job = Mock() + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_update_host_record(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure Host record is updated.""" + nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_host_record": True} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.create_a_record = False + self.config.create_ptr_record = False + self.config.create_host_record = True + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "has_host_record": True, + "host_record_ref": "record:host/xyz", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.update_host_record.assert_called_once() + infoblox_adapter.conn.update_host_record.assert_called_with( + ref="record:host/xyz", data={"name": "server2.local.test.net"} + ) + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_update_a_record(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure A record is updated.""" + nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_a_record": True} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.create_host_record = False + self.config.create_a_record = True + self.config.create_ptr_record = False + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + 
inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "has_a_record": True, + "a_record_ref": "record:a/xyz", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.update_a_record.assert_called_once() + infoblox_adapter.conn.update_a_record.assert_called_with( + ref="record:a/xyz", data={"name": "server2.local.test.net"} + ) + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() + infoblox_adapter.conn.update_host_record.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_create_ptr_record(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure PTR record is created if one doesn't currently exist.""" + nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_a_record": True, "has_ptr_record": True} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.create_host_record = False + self.config.create_a_record = True + self.config.create_ptr_record = True + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "dns_name": "server2.local.test.net", + "has_a_record": True, + "has_ptr_record": False, + "a_record_ref": "record:a/xyz", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + + infoblox_adapter.conn.create_ptr_record.assert_called_once() + infoblox_adapter.conn.create_ptr_record.assert_called_with( + fqdn="server2.local.test.net", ip_address="10.0.0.1", network_view="default" + ) + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.update_host_record.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_update_a_and_ptr_record(self, mock_tag_involved_objects, mock_validate_dns_name): + 
"""Ensure A and PTR records are updated.""" + nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_a_record": True, "has_ptr_record": True} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.create_host_record = False + self.config.create_a_record = True + self.config.create_ptr_record = True + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "has_a_record": True, + "has_ptr_record": True, + "a_record_ref": "record:a/xyz", + "ptr_record_ref": "record:ptr/xyz", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.update_ptr_record.assert_called_once() + infoblox_adapter.conn.update_ptr_record.assert_called_with( + ref="record:ptr/xyz", data={"name": "server2.local.test.net"} + ) + infoblox_adapter.conn.update_a_record.assert_called_once() + infoblox_adapter.conn.update_a_record.assert_called_with( + ref="record:a/xyz", data={"name": "server2.local.test.net"} + ) + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_update_fail_host_and_a_record(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure update fails if an A record is marked for update but Infoblox already has a Host record.""" + nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_a_record": True} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.create_host_record = False + self.config.create_a_record = True + self.config.create_ptr_record = False + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + job_logger = Mock() + infoblox_adapter.job.logger = job_logger + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "has_host_record": True, + "host_record_ref": "record:host/xyz", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + 
infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() + + log_msg = "Cannot update A Record for IP Address, 10.0.0.1. It already has an existing Host Record." + job_logger.warning.assert_called_with(log_msg) + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_update_fail_ptr_and_host_record(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure update fails if PTR record is marked for update but Infoblox already has a Host record.""" + nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_ptr_record": True} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.create_host_record = False + self.config.create_a_record = True + self.config.create_ptr_record = True + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + job_logger = Mock() + infoblox_adapter.job.logger = job_logger + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "has_host_record": True, + "host_record_ref": "record:host/xyz", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() + + log_msg = ( + "Cannot create/update PTR Record for IP Address, 10.0.0.1. It already has an existing Host Record." 
+ ) + job_logger.warning.assert_called_with(log_msg) + + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_update_fail_a_and_host_record(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure update fails if Host record is marked for update but Infoblox already has an A record.""" + nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_host_record": True} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.create_host_record = True + self.config.create_a_record = False + self.config.create_ptr_record = False + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + job_logger = Mock() + infoblox_adapter.job.logger = job_logger + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "has_a_record": True, + "a_record_ref": "record:a/xyz", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() + + log_msg = "Cannot update Host Record for IP Address, 10.0.0.1. It already has an existing A Record." 
+ job_logger.warning.assert_called_with(log_msg) + + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_update_fail_host_and_ptr_record(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure update fails if Host record is marked for update but Infoblox already has a PTR record.""" + nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_host_record": True} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.create_host_record = True + self.config.create_a_record = False + self.config.create_ptr_record = False + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + job_logger = Mock() + infoblox_adapter.job.logger = job_logger + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "has_ptr_record": True, + "ptr_record_ref": "record:ptr/xyz", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() + mock_validate_dns_name.assert_called_once() + + log_msg = "Cannot update Host Record for IP Address, 10.0.0.1. It already has an existing PTR Record." + job_logger.warning.assert_called_with(log_msg) + + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + ) From ec83c9f76399b88bdf667c839f1096e87f0db59b Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 20 May 2024 15:17:49 +0100 Subject: [PATCH 061/229] Update msg. --- nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py index fbd6fad6e..7bac0a1aa 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py @@ -149,7 +149,7 @@ def update(self, attrs): # pylint: disable=too-many-branches and inf_attrs["has_host_record"] ): incomp_msg = ( - f"Cannot create/update PTR Record for IP Address, {ip_address}. It already has an existing Host Record" + f"Cannot create/update PTR Record for IP Address, {ip_address}. It already has an existing Host Record." 
) incompatible_record_types = True elif ( From 8f1fec3a48a71703fb53e29b4aa4c0efe20afbde Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 20 May 2024 15:43:45 +0100 Subject: [PATCH 062/229] Remove unused code. --- nautobot_ssot/tests/infoblox/test_infoblox_adapter.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py b/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py index 4bbae62cd..f411d0a6e 100644 --- a/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py +++ b/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py @@ -238,9 +238,6 @@ def test_load_prefixes_add_duplicate_prefix( self.assertEqual(mock_extra_attr_dict.call_count, 2) mock_default_extra_attrs.assert_called_once() - # @unittest.mock.patch.dict( - # PLUGIN_CFG, [("infoblox_import_subnets", []), ("infoblox_import_objects_subnets_ipv6", True)] - # ) @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox.get_default_ext_attrs", autospec=True, From 81fe2d1c7f658062f742b6027fffa76372e9f9d0 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 20 May 2024 15:46:53 +0100 Subject: [PATCH 063/229] Fix docstring. --- nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py b/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py index a13671bb1..7c99be6b6 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py @@ -396,7 +396,7 @@ def create(cls, diffsync, ids, attrs): return None def update(self, attrs): - """Update VLAN object in Nautobot.""" + """Update Namespace object in Nautobot.""" _ns = OrmNamespace.objects.get(id=self.pk) if "ext_attrs" in attrs: process_ext_attrs(diffsync=self.diffsync, obj=_ns, extattrs=attrs["ext_attrs"]) From 9088a0655d40ae371b4886694ff9ab92c6a255ce Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 20 May 2024 16:33:04 +0100 Subject: [PATCH 064/229] Remove sync filters from nautobot config. --- development/nautobot_config.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/development/nautobot_config.py b/development/nautobot_config.py index a6c0b0773..b33319dc8 100644 --- a/development/nautobot_config.py +++ b/development/nautobot_config.py @@ -232,12 +232,6 @@ "infoblox_verify_ssl": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_VERIFY_SSL", True)), "infoblox_wapi_version": os.getenv("NAUTOBOT_SSOT_INFOBLOX_WAPI_VERSION", "v2.12"), "infoblox_network_view": os.getenv("NAUTOBOT_SSOT_INFOBLOX_NETWORK_VIEW", ""), - "infoblox_sync_filters": [ - {"network_view": "default", "prefixes_ipv4": ["10.0.0.0/8"]}, - {"network_view": "dev", "prefixes_ipv4": ["10.0.0.0/8"]}, - {"network_view": "prod", "prefixes_ipv4": ["10.0.0.0/16"]}, - {"network_view": "test", "prefixes_ipv4": ["10.0.0.0/8"]}, - ], "ipfabric_api_token": os.getenv("NAUTOBOT_SSOT_IPFABRIC_API_TOKEN"), "ipfabric_host": os.getenv("NAUTOBOT_SSOT_IPFABRIC_HOST"), "ipfabric_ssl_verify": is_truthy(os.getenv("NAUTOBOT_SSOT_IPFABRIC_SSL_VERIFY", "False")), From 7c19250f42da6eb6e3c77cecbe506b8a5446c420 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 20 May 2024 19:02:14 +0100 Subject: [PATCH 065/229] Remove unused import. 
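For context, the sync filters removed from development/nautobot_config.py in the previous patch are not lost: sync scoping now comes from the infoblox_sync_filters field on an SSOTInfobloxConfig instance, which is what the fixtures and adapter tests exercise. A minimal sketch of the expected shape, reusing values that appear in the tests:

    # Each rule scopes one Infoblox network view; the optional prefix lists
    # narrow what is synced from that view.
    infoblox_sync_filters = [
        {"network_view": "default"},  # whole view, no prefix filtering
        {"network_view": "dev", "prefixes_ipv4": ["10.0.0.0/16"]},  # IPv4 subset
    ]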
--- nautobot_ssot/tests/infoblox/test_nautobot_adapter.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py b/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py index 9d7ed4c9b..50e0b5a37 100644 --- a/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py +++ b/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py @@ -3,7 +3,7 @@ from django.contrib.contenttypes.models import ContentType from django.test import TestCase -from nautobot.extras.models import Relationship, RelationshipAssociation, Status +from nautobot.extras.models import RelationshipAssociation, Status from nautobot.ipam.models import Namespace, Prefix, VLAN, VLANGroup from nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot import NautobotAdapter From f9085f4d0ce7653d3f74fef0a46c6c68bee23a85 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 20 May 2024 19:06:19 +0100 Subject: [PATCH 066/229] Add debug messages. --- .../integrations/infoblox/diffsync/adapters/infoblox.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py index 84d9c7c29..b97a740fd 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py @@ -69,6 +69,8 @@ def load_network_views(self, sync_filters: dict): Args: sync_filter (dict): Sync filter containing sync rules """ + if self.job.debug: + self.job.logger.debug("Loading Network Views from Infoblox.") network_view_filters = {sf["network_view"] for sf in sync_filters if "network_view" in sf} try: networkviews = self.conn.get_network_views() @@ -159,6 +161,8 @@ def _load_all_prefixes_filtered(self, sync_filters: list, include_ipv4: bool, in def load_prefixes(self, include_ipv4: bool, include_ipv6: bool, sync_filters: Optional[list] = None): """Load InfobloxNetwork DiffSync model.""" + if self.job.debug: + self.job.logger.debug("Loading Subnets from Infoblox.") try: containers, subnets = self._load_all_prefixes_filtered( sync_filters, include_ipv4=include_ipv4, include_ipv6=include_ipv6 @@ -236,6 +240,8 @@ def load_ipaddresses(self): def load_vlanviews(self): """Load InfobloxVLANView DiffSync model.""" + if self.job.debug: + self.job.logger.debug("Loading VLAN Views from Infoblox.") try: vlanviews = self.conn.get_vlanviews() except requests.exceptions.HTTPError as err: From 4ce1d27de18771193dfd1b9b208ae664ea27b280 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 20 May 2024 19:06:39 +0100 Subject: [PATCH 067/229] Linting. 
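All of the debug messages added in the previous patch share the same guard, so they stay out of normal sync logs unless the job is run with debug enabled. The pattern, shown as a fragment from the adapter load methods (self.job is the running sync job):

    if self.job.debug:
        self.job.logger.debug("Loading Network Views from Infoblox.")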
--- nautobot_ssot/tests/infoblox/fixtures_infoblox.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/nautobot_ssot/tests/infoblox/fixtures_infoblox.py b/nautobot_ssot/tests/infoblox/fixtures_infoblox.py index c1cc2f6ca..34166c6ef 100644 --- a/nautobot_ssot/tests/infoblox/fixtures_infoblox.py +++ b/nautobot_ssot/tests/infoblox/fixtures_infoblox.py @@ -11,7 +11,14 @@ SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices, ) -from nautobot.extras.models import ExternalIntegration, Relationship, Secret, SecretsGroup, SecretsGroupAssociation, Status +from nautobot.extras.models import ( + ExternalIntegration, + Relationship, + Secret, + SecretsGroup, + SecretsGroupAssociation, + Status, +) from nautobot.ipam.models import Prefix, VLAN From c377ef5f1534daa40ed533fc94d1942419de737c Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 20 May 2024 21:50:19 +0100 Subject: [PATCH 068/229] Linting. --- nautobot_ssot/integrations/infoblox/constant.py | 1 + .../integrations/infoblox/diffsync/models/__init__.py | 1 + .../integrations/infoblox/diffsync/models/infoblox.py | 2 +- .../integrations/infoblox/diffsync/models/nautobot.py | 2 ++ nautobot_ssot/integrations/infoblox/tables.py | 3 ++- nautobot_ssot/integrations/infoblox/urls.py | 1 + nautobot_ssot/integrations/infoblox/utils/diffsync.py | 1 + nautobot_ssot/integrations/infoblox/views.py | 1 + 8 files changed, 10 insertions(+), 2 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/constant.py b/nautobot_ssot/integrations/infoblox/constant.py index a1e8b530a..2bcb8b77c 100644 --- a/nautobot_ssot/integrations/infoblox/constant.py +++ b/nautobot_ssot/integrations/infoblox/constant.py @@ -1,2 +1,3 @@ """Constants for use with the Infoblox SSoT app.""" + TAG_COLOR = "40bfae" diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py b/nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py index 9dd609bf6..143b17cbd 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py @@ -1,4 +1,5 @@ """Initialize models for Nautobot and Infoblox.""" + from .nautobot import NautobotNamespace, NautobotNetwork, NautobotIPAddress, NautobotVlanGroup, NautobotVlan from .infoblox import InfobloxNamespace, InfobloxNetwork, InfobloxIPAddress, InfobloxVLANView, InfobloxVLAN diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py index 7bac0a1aa..184dcdd36 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py @@ -170,7 +170,7 @@ def update(self, attrs): # pylint: disable=too-many-branches incompatible_record_types = True if incompatible_record_types: - self.diffsync.job.logger.warning(incomp_msg) + self.diffsync.job.logger.warning(incomp_msg) # pylint: disable=possibly-used-before-assignment return super().update(attrs) a_record_action = "none" diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py b/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py index 7c99be6b6..46b4f45f8 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py @@ -223,6 +223,8 @@ def create(cls, diffsync, ids, attrs): else: diffsync.logger.warning(f"unable to determine IPAddress Type for {addr}, defaulting to 'Host'") ip_addr_type = "host" + 
else: + ip_addr_type = "host" if diffsync.job.debug: diffsync.job.logger.debug(f"Creating IP Address {addr}") _ip = OrmIPAddress( diff --git a/nautobot_ssot/integrations/infoblox/tables.py b/nautobot_ssot/integrations/infoblox/tables.py index e6739e541..d3161752e 100644 --- a/nautobot_ssot/integrations/infoblox/tables.py +++ b/nautobot_ssot/integrations/infoblox/tables.py @@ -1,4 +1,5 @@ """Tables implementation for SSOT Infoblox.""" + import django_tables2 as tables from nautobot.apps.tables import BaseTable, BooleanColumn, ButtonsColumn @@ -25,7 +26,7 @@ class Meta(BaseTable.Meta): """Meta attributes.""" model = SSOTInfobloxConfig - fields = ( + fields = ( # pylint: disable=nb-use-fields-all "name", "infoblox_url", "enable_sync_to_infoblox", diff --git a/nautobot_ssot/integrations/infoblox/urls.py b/nautobot_ssot/integrations/infoblox/urls.py index 53183c80b..0d3d67ea5 100644 --- a/nautobot_ssot/integrations/infoblox/urls.py +++ b/nautobot_ssot/integrations/infoblox/urls.py @@ -1,4 +1,5 @@ """URL patterns for nautobot-ssot-servicenow.""" + from django.urls import path from nautobot.apps.urls import NautobotUIViewSetRouter diff --git a/nautobot_ssot/integrations/infoblox/utils/diffsync.py b/nautobot_ssot/integrations/infoblox/utils/diffsync.py index c2f70b7b2..87a1b402c 100644 --- a/nautobot_ssot/integrations/infoblox/utils/diffsync.py +++ b/nautobot_ssot/integrations/infoblox/utils/diffsync.py @@ -1,4 +1,5 @@ """Utilities for DiffSync related stuff.""" + from typing import Optional from django.contrib.contenttypes.models import ContentType from django.utils.text import slugify diff --git a/nautobot_ssot/integrations/infoblox/views.py b/nautobot_ssot/integrations/infoblox/views.py index 46d2b691e..9ef9206e7 100644 --- a/nautobot_ssot/integrations/infoblox/views.py +++ b/nautobot_ssot/integrations/infoblox/views.py @@ -1,4 +1,5 @@ """Views implementation for SSOT Infoblox.""" + from nautobot.extras.views import ObjectChangeLogView, ObjectNotesView from nautobot.apps.views import ( ObjectDestroyViewMixin, From 7d0c91aededa8d2d0b25dbb8f21cf75b4a815cd9 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 20 May 2024 21:50:46 +0100 Subject: [PATCH 069/229] Ensure status exists. 
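Context for the change below: the fixtures previously assumed that an "Active" Status exists and is already usable on the IPAM models, which may not hold on a bare test database. A minimal sketch mirroring the fixture hardening in the diff:

    # Create the Status if needed, then allow it on every IPAM model the
    # Infoblox tests touch.
    from django.contrib.contenttypes.models import ContentType
    from nautobot.extras.models import Status
    from nautobot.ipam.models import IPAddress, Prefix, VLAN, VLANGroup

    default_status, _ = Status.objects.get_or_create(name="Active")
    for model in [IPAddress, Prefix, VLAN, VLANGroup]:
        default_status.content_types.add(ContentType.objects.get_for_model(model))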
--- nautobot_ssot/tests/infoblox/fixtures_infoblox.py | 6 ++++-- nautobot_ssot/tests/infoblox/test_infoblox_models.py | 1 + 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/nautobot_ssot/tests/infoblox/fixtures_infoblox.py b/nautobot_ssot/tests/infoblox/fixtures_infoblox.py index 34166c6ef..d83bddcd6 100644 --- a/nautobot_ssot/tests/infoblox/fixtures_infoblox.py +++ b/nautobot_ssot/tests/infoblox/fixtures_infoblox.py @@ -19,7 +19,7 @@ SecretsGroupAssociation, Status, ) -from nautobot.ipam.models import Prefix, VLAN +from nautobot.ipam.models import IPAddress, Prefix, VLAN, VLANGroup from nautobot_ssot.integrations.infoblox.utils import client @@ -37,7 +37,9 @@ def _json_read_fixture(name): def create_default_infoblox_config(infoblox_url="infoblox.example.com"): - default_status = Status.objects.get(name="Active") + default_status, _ = Status.objects.get_or_create(name="Active") + for model in [IPAddress, Prefix, VLAN, VLANGroup]: + default_status.content_types.add(ContentType.objects.get_for_model(model)) infoblox_sync_filters = [{"network_view": "default"}] secrets_group, _ = SecretsGroup.objects.get_or_create(name="InfobloxSSOTUnitTesting") infoblox_username, _ = Secret.objects.get_or_create( diff --git a/nautobot_ssot/tests/infoblox/test_infoblox_models.py b/nautobot_ssot/tests/infoblox/test_infoblox_models.py index 59dd27f76..910d1952c 100644 --- a/nautobot_ssot/tests/infoblox/test_infoblox_models.py +++ b/nautobot_ssot/tests/infoblox/test_infoblox_models.py @@ -1,4 +1,5 @@ """Unit tests for the Infoblox Diffsync models.""" + import unittest from unittest.mock import Mock From 1cdd33a91fdc2f366af2503fa71305ec66f89895 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Tue, 30 Apr 2024 18:30:41 +0100 Subject: [PATCH 070/229] Add Namespace models --- nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py b/nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py index 143b17cbd..9dd609bf6 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py @@ -1,5 +1,4 @@ """Initialize models for Nautobot and Infoblox.""" - from .nautobot import NautobotNamespace, NautobotNetwork, NautobotIPAddress, NautobotVlanGroup, NautobotVlan from .infoblox import InfobloxNamespace, InfobloxNetwork, InfobloxIPAddress, InfobloxVLANView, InfobloxVLAN From a72a532108ada7e60a189530b2a95f7b38e85daa Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Fri, 17 May 2024 20:40:46 +0100 Subject: [PATCH 071/229] Update tests. --- nautobot_ssot/tests/infoblox/test_tags_and_cfs.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py b/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py index c935399b6..dce2530c1 100644 --- a/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py +++ b/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py @@ -15,6 +15,8 @@ from .fixtures_infoblox import create_default_infoblox_config +from .fixtures_infoblox import create_default_infoblox_config + class TestTagging(TestCase): """Tests ensuring tagging is applied to objects synced from and to Infoblox.""" From 7d97af3afccc31ebf862103a5c23e643de1b2954 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 20 May 2024 14:20:59 +0100 Subject: [PATCH 072/229] Remove legacy config. 
--- nautobot_ssot/integrations/infoblox/constant.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nautobot_ssot/integrations/infoblox/constant.py b/nautobot_ssot/integrations/infoblox/constant.py index 2bcb8b77c..a1e8b530a 100644 --- a/nautobot_ssot/integrations/infoblox/constant.py +++ b/nautobot_ssot/integrations/infoblox/constant.py @@ -1,3 +1,2 @@ """Constants for use with the Infoblox SSoT app.""" - TAG_COLOR = "40bfae" From 9fce5d0b35525b74a437c2ae3ff1d5d01d441045 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 20 May 2024 14:22:36 +0100 Subject: [PATCH 073/229] - Allow excluding attributes from sync. - Add DNS name validation. - Update network view to namespace mapping logic. --- nautobot_ssot/integrations/infoblox/utils/diffsync.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nautobot_ssot/integrations/infoblox/utils/diffsync.py b/nautobot_ssot/integrations/infoblox/utils/diffsync.py index 87a1b402c..c2f70b7b2 100644 --- a/nautobot_ssot/integrations/infoblox/utils/diffsync.py +++ b/nautobot_ssot/integrations/infoblox/utils/diffsync.py @@ -1,5 +1,4 @@ """Utilities for DiffSync related stuff.""" - from typing import Optional from django.contrib.contenttypes.models import ContentType from django.utils.text import slugify From e3e0c77ee2c80f409ed0c5ef0031fea39c0b61a3 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Tue, 21 May 2024 12:00:03 +0100 Subject: [PATCH 074/229] Linting. --- nautobot_ssot/integrations/infoblox/constant.py | 1 + nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py | 1 + nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py | 2 +- nautobot_ssot/integrations/infoblox/utils/diffsync.py | 1 + nautobot_ssot/tests/infoblox/test_tags_and_cfs.py | 2 -- 5 files changed, 4 insertions(+), 3 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/constant.py b/nautobot_ssot/integrations/infoblox/constant.py index a1e8b530a..2bcb8b77c 100644 --- a/nautobot_ssot/integrations/infoblox/constant.py +++ b/nautobot_ssot/integrations/infoblox/constant.py @@ -1,2 +1,3 @@ """Constants for use with the Infoblox SSoT app.""" + TAG_COLOR = "40bfae" diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py b/nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py index 9dd609bf6..143b17cbd 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py @@ -1,4 +1,5 @@ """Initialize models for Nautobot and Infoblox.""" + from .nautobot import NautobotNamespace, NautobotNetwork, NautobotIPAddress, NautobotVlanGroup, NautobotVlan from .infoblox import InfobloxNamespace, InfobloxNetwork, InfobloxIPAddress, InfobloxVLANView, InfobloxVLAN diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py index 184dcdd36..7bac0a1aa 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py @@ -170,7 +170,7 @@ def update(self, attrs): # pylint: disable=too-many-branches incompatible_record_types = True if incompatible_record_types: - self.diffsync.job.logger.warning(incomp_msg) # pylint: disable=possibly-used-before-assignment + self.diffsync.job.logger.warning(incomp_msg) return super().update(attrs) a_record_action = "none" diff --git a/nautobot_ssot/integrations/infoblox/utils/diffsync.py b/nautobot_ssot/integrations/infoblox/utils/diffsync.py index c2f70b7b2..87a1b402c 100644 
--- a/nautobot_ssot/integrations/infoblox/utils/diffsync.py +++ b/nautobot_ssot/integrations/infoblox/utils/diffsync.py @@ -1,4 +1,5 @@ """Utilities for DiffSync related stuff.""" + from typing import Optional from django.contrib.contenttypes.models import ContentType from django.utils.text import slugify diff --git a/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py b/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py index dce2530c1..c935399b6 100644 --- a/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py +++ b/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py @@ -15,8 +15,6 @@ from .fixtures_infoblox import create_default_infoblox_config -from .fixtures_infoblox import create_default_infoblox_config - class TestTagging(TestCase): """Tests ensuring tagging is applied to objects synced from and to Infoblox.""" From 16cfd7bcf6bbb3fc16f5fa451350e1b8b87709f0 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Tue, 21 May 2024 15:26:09 +0100 Subject: [PATCH 075/229] Remove debug msg. --- nautobot_ssot/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nautobot_ssot/__init__.py b/nautobot_ssot/__init__.py index d0b54911d..7c3d62f52 100644 --- a/nautobot_ssot/__init__.py +++ b/nautobot_ssot/__init__.py @@ -35,7 +35,6 @@ def _check_min_nautobot_version_met(): for app, nb_ver in _MIN_NAUTOBOT_VERSION.items(): if packaging.version.parse(nb_ver) > packaging.version.parse(nautobot_version): incompatible_apps_msg.append(f"The `{app}` requires Nautobot version {nb_ver} or higher.\n") - print(incompatible_apps_msg) if incompatible_apps_msg: raise RuntimeError( From 06da70b1e59e8e4e1f0e227191962807e6d4329d Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Wed, 22 May 2024 13:24:38 +0100 Subject: [PATCH 076/229] Change defaults for some fields. --- nautobot_ssot/integrations/infoblox/models.py | 33 ++++++++++++------- 1 file changed, 21 insertions(+), 12 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/models.py b/nautobot_ssot/integrations/infoblox/models.py index beae69257..d4b8ae75a 100644 --- a/nautobot_ssot/integrations/infoblox/models.py +++ b/nautobot_ssot/integrations/infoblox/models.py @@ -13,10 +13,15 @@ def _get_default_sync_filters(): - """Provides default value for infoblox_sync_filters field.""" + """Provides default value for SSOTInfobloxConfig infoblox_sync_filters field.""" return [{"network_view": "default"}] +def _get_default_cf_fields_ignore(): + """Provides default value for SSOTInfobloxConfig cf_fields_ignore field.""" + return {"extensible_attributes": [], "custom_fields": []} + + class SSOTInfobloxConfig(PrimaryModel): # pylint: disable=too-many-ancestors """SSOT Infoblox Configuration model.""" @@ -62,8 +67,8 @@ class SSOTInfobloxConfig(PrimaryModel): # pylint: disable=too-many-ancestors verbose_name="Import VLANs", ) infoblox_sync_filters = models.JSONField(default=_get_default_sync_filters, encoder=DjangoJSONEncoder) - infoblox_dns_view_mapping = models.JSONField(default=dict, encoder=DjangoJSONEncoder) - cf_fields_ignore = models.JSONField(default=dict, encoder=DjangoJSONEncoder) + infoblox_dns_view_mapping = models.JSONField(default=dict, encoder=DjangoJSONEncoder, blank=True) + cf_fields_ignore = models.JSONField(default=_get_default_cf_fields_ignore, encoder=DjangoJSONEncoder, blank=True) import_ipv4 = models.BooleanField( default=True, verbose_name="Import IPv4", @@ -123,7 +128,7 @@ def _clean_infoblox_sync_filters(self): # pylint: disable=too-many-branches invalid_keys = set(sync_filter.keys()) - allowed_keys if invalid_keys: raise ValidationError( 
- {"infoblox_sync_filters": f"Invalid keys found in the sync filter: {''.join(invalid_keys)}."} + {"infoblox_sync_filters": f"Invalid keys found in the sync filter: {', '.join(invalid_keys)}."} ) if "network_view" not in sync_filter: @@ -151,7 +156,9 @@ def _clean_infoblox_sync_filters(self): # pylint: disable=too-many-branches try: if "/" not in prefix: raise ValidationError( - {"infoblox_sync_filters": f"IPv4 prefix must have a prefix length: {prefix}."} + { + "infoblox_sync_filters": f"IPv4 prefix must have a prefix length defined using `/` format: {prefix}." + } ) ipaddress.IPv4Network(prefix) except (ValueError, TypeError) as error: @@ -170,7 +177,9 @@ def _clean_infoblox_sync_filters(self): # pylint: disable=too-many-branches try: if "/" not in prefix: raise ValidationError( - {"infoblox_sync_filters": f"IPv6 prefix must have a prefix length: {prefix}."} + { + "infoblox_sync_filters": f"IPv6 prefix must have a prefix length defined using `/` format: {prefix}." + } ) ipaddress.IPv6Network(prefix) except (ValueError, TypeError) as error: @@ -178,10 +187,10 @@ def _clean_infoblox_sync_filters(self): # pylint: disable=too-many-branches {"infoblox_sync_filters": f"IPv6 prefix parsing error: {str(error)}."} ) - def _clean_secrets_group(self): - """Performs validation of the secrets_group field.""" + def _clean_infoblox_instance(self): + """Performs validation of the infoblox_instance field.""" if not self.infoblox_instance.secrets_group: - raise ValidationError({"secrets_group": "Infoblox instance must have Secrets groups assigned."}) + raise ValidationError({"infoblox_instance": "Infoblox instance must have Secrets groups assigned."}) try: self.infoblox_instance.secrets_group.get_secret_value( access_type=SecretsGroupAccessTypeChoices.TYPE_REST, @@ -190,7 +199,7 @@ def _clean_secrets_group(self): except SecretsGroupAssociation.DoesNotExist: raise ValidationError( # pylint: disable=raise-missing-from { - "secrets_group": "Secrets group for the Infoblox instance must have secret with type Username and access type REST." + "infoblox_instance": "Secrets group for the Infoblox instance must have secret with type Username and access type REST." } ) try: @@ -201,7 +210,7 @@ def _clean_secrets_group(self): except SecretsGroupAssociation.DoesNotExist: raise ValidationError( # pylint: disable=raise-missing-from { - "secrets_group": "Secrets group for the Infoblox instance must have secret with type Password and access type REST." + "infoblox_instance": "Secrets group for the Infoblox instance must have secret with type Password and access type REST." } ) @@ -284,7 +293,7 @@ def clean(self): super().clean() self._clean_infoblox_sync_filters() - self._clean_secrets_group() + self._clean_infoblox_instance() self._clean_import_ip() self._clean_ip_address_create_options() self._clean_infoblox_dns_view_mapping() From 215320af3e778c682ffe0ced26caac571e69f291 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Wed, 22 May 2024 13:25:13 +0100 Subject: [PATCH 077/229] Handle existing sg. Add additional fields to InfobloxConfig. 
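The database-ready callback can fire more than once over the life of a deployment, and the unconditional `ExternalIntegration.objects.create()` would fail once an integration named "MigratedInfobloxInstance" already exists. The callback now looks the integration up by name and only applies the remaining fields on first creation. A sketch of the lookup semantics (values illustrative):

    integration, created = ExternalIntegration.objects.get_or_create(
        name="MigratedInfobloxInstance",
        defaults={"remote_url": "https://replace.me.local", "verify_ssl": True, "timeout": 60},
    )
    # `created` is True only on the first run; later runs return the existing
    # row and ignore `defaults`, so an already-migrated instance is untouched.

The new SSOTInfobloxConfig fields (`infoblox_dns_view_mapping`, `cf_fields_ignore`, and the record-creation toggles) are also populated explicitly so migrated configs start from known values.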
--- nautobot_ssot/integrations/infoblox/signals.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/signals.py b/nautobot_ssot/integrations/infoblox/signals.py index 9e089a280..4c1168f88 100644 --- a/nautobot_ssot/integrations/infoblox/signals.py +++ b/nautobot_ssot/integrations/infoblox/signals.py @@ -141,12 +141,14 @@ def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disa "secret": infoblox_password, }, ) - external_integration = ExternalIntegration.objects.create( + external_integration, _ = ExternalIntegration.objects.get_or_create( name="MigratedInfobloxInstance", - remote_url=str(config.get("infoblox_url", "https://replace.me.local")), - secrets_group=secrets_group, - verify_ssl=bool(config.get("infoblox_verify_ssl", True)), - timeout=infoblox_request_timeout, + defaults=dict( # pylint: disable=use-dict-literal + remote_url=str(config.get("infoblox_url", "https://replace.me.local")), + secrets_group=secrets_group, + verify_ssl=bool(config.get("infoblox_verify_ssl", True)), + timeout=infoblox_request_timeout, + ), ) SSOTInfobloxConfig.objects.create( @@ -164,6 +166,11 @@ def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disa import_ipv6=bool(config.get("infoblox_import_objects_subnets_ipv6", False)), job_enabled=True, infoblox_sync_filters=infoblox_sync_filters, + infoblox_dns_view_mapping={}, + cf_fields_ignore={}, + create_a_record=False, + create_host_record=True, + create_ptr_record=False, ) From 32991879d7f9b5d4ddfb2ad79b879113883f074a Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Wed, 22 May 2024 13:25:32 +0100 Subject: [PATCH 078/229] Update migrations. --- .../migrations/0009_ssotconfig_ssotinfobloxconfig.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/nautobot_ssot/migrations/0009_ssotconfig_ssotinfobloxconfig.py b/nautobot_ssot/migrations/0009_ssotconfig_ssotinfobloxconfig.py index e100e1845..52e2784a5 100644 --- a/nautobot_ssot/migrations/0009_ssotconfig_ssotinfobloxconfig.py +++ b/nautobot_ssot/migrations/0009_ssotconfig_ssotinfobloxconfig.py @@ -1,4 +1,4 @@ -# Generated by Django 3.2.23 on 2024-05-17 18:53 +# Generated by Django 3.2.23 on 2024-05-21 17:21 import django.core.serializers.json from django.db import migrations, models @@ -10,6 +10,7 @@ class Migration(migrations.Migration): + dependencies = [ ("extras", "0102_set_null_objectchange_contenttype"), ("nautobot_ssot", "0008_auto_20240110_1019"), @@ -58,11 +59,15 @@ class Migration(migrations.Migration): ), ( "infoblox_dns_view_mapping", - models.JSONField(default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder), + models.JSONField(blank=True, default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder), ), ( "cf_fields_ignore", - models.JSONField(default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder), + models.JSONField( + blank=True, + default=nautobot_ssot.integrations.infoblox.models._get_default_cf_fields_ignore, + encoder=django.core.serializers.json.DjangoJSONEncoder, + ), ), ("import_ipv4", models.BooleanField(default=True)), ("import_ipv6", models.BooleanField(default=False)), From e0dc50e29040a769ecd767745fee33cbfeb6a014 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Wed, 22 May 2024 13:25:53 +0100 Subject: [PATCH 079/229] Add tests for Infoblox Config model. 
---
 nautobot_ssot/tests/infoblox/test_models.py | 501 ++++++++++++++++++++
 1 file changed, 501 insertions(+)
 create mode 100644 nautobot_ssot/tests/infoblox/test_models.py

diff --git a/nautobot_ssot/tests/infoblox/test_models.py b/nautobot_ssot/tests/infoblox/test_models.py
new file mode 100644
index 000000000..bf62ad911
--- /dev/null
+++ b/nautobot_ssot/tests/infoblox/test_models.py
@@ -0,0 +1,501 @@
+# pylint: disable=R0801
+"""Infoblox Integration model tests."""
+import os
+from unittest import mock
+from copy import deepcopy
+
+from django.core.exceptions import ValidationError
+from django.test import TestCase
+from nautobot.extras.choices import (
+    SecretsGroupAccessTypeChoices,
+    SecretsGroupSecretTypeChoices,
+)
+from nautobot.extras.models import ExternalIntegration, Secret, SecretsGroup, SecretsGroupAssociation, Status
+
+
+from nautobot_ssot.integrations.infoblox.models import SSOTInfobloxConfig
+
+
+@mock.patch.dict(os.environ, {"INFOBLOX_USERNAME": "username", "INFOBLOX_PASSWORD": "password"})
+class SSOTInfobloxConfigTestCase(TestCase):  # pylint: disable=too-many-public-methods
+    """Tests for the SSOTInfobloxConfig models."""
+
+    def setUp(self):
+        """Setup testing."""
+        self.default_status, _ = Status.objects.get_or_create(name="Active")
+        sync_filters = [{"network_view": "default"}]
+
+        infoblox_request_timeout = 60
+        secrets_group, _ = SecretsGroup.objects.get_or_create(name="InfobloxSSOTUnitTest")
+        inf_username, _ = Secret.objects.get_or_create(
+            name="Infoblox Username - InfobloxSSOTUnitTest",
+            defaults={
+                "provider": "environment-variable",
+                "parameters": {"variable": "INFOBLOX_USERNAME"},
+            },
+        )
+        inf_password, _ = Secret.objects.get_or_create(
+            name="Infoblox Password - InfobloxSSOTUnitTest",
+            defaults={
+                "provider": "environment-variable",
+                "parameters": {"variable": "INFOBLOX_PASSWORD"},
+            },
+        )
+        self.sg_username, _ = SecretsGroupAssociation.objects.get_or_create(
+            secrets_group=secrets_group,
+            access_type=SecretsGroupAccessTypeChoices.TYPE_REST,
+            secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME,
+            defaults={
+                "secret": inf_username,
+            },
+        )
+        self.sg_password, _ = SecretsGroupAssociation.objects.get_or_create(
+            secrets_group=secrets_group,
+            access_type=SecretsGroupAccessTypeChoices.TYPE_REST,
+            secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD,
+            defaults={
+                "secret": inf_password,
+            },
+        )
+        self.external_integration = ExternalIntegration.objects.create(
+            name="InfobloxModelUnitTestInstance",
+            remote_url="https://infoblox.me.local",
+            secrets_group=secrets_group,
+            verify_ssl=True,
+            timeout=infoblox_request_timeout,
+        )
+
+        self.infoblox_config_dict = {
+            "name": "InfobloxModelUnitTestConfig",
+            "description": "Unit Test Config",
+            "default_status": self.default_status,
+            "infoblox_wapi_version": "v2.12",
+            "infoblox_instance": self.external_integration,
+            "enable_sync_to_infoblox": True,
+            "import_ip_addresses": True,
+            "import_subnets": True,
+            "import_vlan_views": True,
+            "import_vlans": True,
+            "import_ipv4": True,
+            "import_ipv6": False,
+            "job_enabled": True,
+            "infoblox_sync_filters": sync_filters,
+            "infoblox_dns_view_mapping": {"default": "default.default"},
+            "cf_fields_ignore": {"extensible_attributes": [], "custom_fields": []},
+            "create_a_record": False,
+            "create_host_record": True,
+            "create_ptr_record": False,
+        }
+
+    def test_create_infoblox_config_required_fields_only(self):
+        """Successfully create Infoblox config with required fields only."""
+        inf_cfg = SSOTInfobloxConfig(
+            name="InfobloxModelUnitTestConfigReqOnly",
+            default_status=self.default_status,
+            infoblox_instance=self.external_integration,
+        )
+        inf_cfg.validated_save()
+
+        inf_cfg_db = SSOTInfobloxConfig.objects.get(name="InfobloxModelUnitTestConfigReqOnly")
+
+        self.assertEqual(inf_cfg_db.name, "InfobloxModelUnitTestConfigReqOnly")
+        self.assertEqual(inf_cfg_db.description, "")
+        self.assertEqual(inf_cfg_db.default_status, self.default_status)
+        self.assertEqual(inf_cfg_db.infoblox_instance, self.external_integration)
+        self.assertEqual(inf_cfg_db.infoblox_wapi_version, "v2.12")
+        self.assertEqual(inf_cfg_db.enable_sync_to_infoblox, False)
+        self.assertEqual(inf_cfg_db.import_ip_addresses, False)
+        self.assertEqual(inf_cfg_db.import_subnets, False)
+        self.assertEqual(inf_cfg_db.import_vlan_views, False)
+        self.assertEqual(inf_cfg_db.import_vlans, False)
+        self.assertEqual(inf_cfg_db.infoblox_sync_filters, [{"network_view": "default"}])
+        self.assertEqual(inf_cfg_db.infoblox_dns_view_mapping, {})
+        self.assertEqual(inf_cfg_db.cf_fields_ignore, {"custom_fields": [], "extensible_attributes": []})
+        self.assertEqual(inf_cfg_db.import_ipv4, True)
+        self.assertEqual(inf_cfg_db.import_ipv6, False)
+        self.assertEqual(inf_cfg_db.create_host_record, True)
+        self.assertEqual(inf_cfg_db.create_a_record, False)
+        self.assertEqual(inf_cfg_db.create_ptr_record, False)
+        self.assertEqual(inf_cfg_db.job_enabled, False)
+
+    def test_create_infoblox_config_all_fields(self):
+        """Successfully create Infoblox config with all fields."""
+        inf_cfg = SSOTInfobloxConfig(
+            name="InfobloxModelUnitTestConfigAllFields",
+            default_status=self.default_status,
+            infoblox_instance=self.external_integration,
+            infoblox_wapi_version="v2.12",
+            enable_sync_to_infoblox=True,
+            import_ip_addresses=True,
+            import_subnets=True,
+            import_vlan_views=True,
+            import_vlans=True,
+            import_ipv4=False,
+            import_ipv6=True,
+            job_enabled=True,
+            infoblox_sync_filters=[{"network_view": "dev"}],
+            infoblox_dns_view_mapping={"default": "default.default"},
+            cf_fields_ignore={"extensible_attributes": ["aws_id"], "custom_fields": ["po_no"]},
+            create_a_record=True,
+            create_host_record=False,
+            create_ptr_record=True,
+        )
+        inf_cfg.validated_save()
+
+        inf_cfg_db = SSOTInfobloxConfig.objects.get(name="InfobloxModelUnitTestConfigAllFields")
+
+        self.assertEqual(inf_cfg_db.name, "InfobloxModelUnitTestConfigAllFields")
+        self.assertEqual(inf_cfg_db.description, "")
+        self.assertEqual(inf_cfg_db.default_status, self.default_status)
+        self.assertEqual(inf_cfg_db.infoblox_instance, self.external_integration)
+        self.assertEqual(inf_cfg_db.infoblox_wapi_version, "v2.12")
+        self.assertEqual(inf_cfg_db.enable_sync_to_infoblox, True)
+        self.assertEqual(inf_cfg_db.import_ip_addresses, True)
+        self.assertEqual(inf_cfg_db.import_subnets, True)
+        self.assertEqual(inf_cfg_db.import_vlan_views, True)
+        self.assertEqual(inf_cfg_db.import_vlans, True)
+        self.assertEqual(inf_cfg_db.infoblox_sync_filters, [{"network_view": "dev"}])
+        self.assertEqual(inf_cfg_db.infoblox_dns_view_mapping, {"default": "default.default"})
+        self.assertEqual(inf_cfg_db.cf_fields_ignore, {"extensible_attributes": ["aws_id"], "custom_fields": ["po_no"]})
+        self.assertEqual(inf_cfg_db.import_ipv4, False)
+        self.assertEqual(inf_cfg_db.import_ipv6, True)
+        self.assertEqual(inf_cfg_db.create_host_record, False)
+        self.assertEqual(inf_cfg_db.create_a_record, True)
+        self.assertEqual(inf_cfg_db.create_ptr_record, True)
+        self.assertEqual(inf_cfg_db.job_enabled, True)
+
+    def
test_infoblox_sync_filters_must_be_a_list(self): + """infoblox_sync_filters must be a list.""" + inf_dict = deepcopy(self.infoblox_config_dict) + inf_dict["infoblox_sync_filters"] = {"k": "v"} + infoblox_config = SSOTInfobloxConfig(**inf_dict) + with self.assertRaises(ValidationError) as failure_exception: + infoblox_config.full_clean() + self.assertIn("infoblox_sync_filters", failure_exception.exception.error_dict) + self.assertEqual(failure_exception.exception.messages[0], "Sync filters must be a list.") + + def test_infoblox_sync_filters_filter_must_be_dict(self): + """Individual filter in infoblox_sync_filters must be a dict.""" + inf_dict = deepcopy(self.infoblox_config_dict) + inf_dict["infoblox_sync_filters"] = [""] + infoblox_config = SSOTInfobloxConfig(**inf_dict) + with self.assertRaises(ValidationError) as failure_exception: + infoblox_config.full_clean() + self.assertIn("infoblox_sync_filters", failure_exception.exception.error_dict) + self.assertEqual(failure_exception.exception.messages[0], "Sync filter must be a dict.") + + def test_infoblox_sync_filters_invalid_key_found(self): + """Only keys allowed in a filter are `network_view`, `prefixes_ipv4` and `prefixes_ipv6`.""" + inf_dict = deepcopy(self.infoblox_config_dict) + inf_dict["infoblox_sync_filters"] = [{"prefixes": [], "name": "myname", "network_view": "dev"}] + infoblox_config = SSOTInfobloxConfig(**inf_dict) + with self.assertRaises(ValidationError) as failure_exception: + infoblox_config.full_clean() + self.assertIn("infoblox_sync_filters", failure_exception.exception.error_dict) + self.assertIn("Invalid keys found in the sync filter", failure_exception.exception.messages[0]) + + def test_infoblox_sync_filters_no_network_view_key(self): + """Prefix filter must have a `network_view` key defined.""" + inf_dict = deepcopy(self.infoblox_config_dict) + inf_dict["infoblox_sync_filters"] = [{"prefixes_ipv4": ["10.0.0.0/24"]}] + infoblox_config = SSOTInfobloxConfig(**inf_dict) + with self.assertRaises(ValidationError) as failure_exception: + infoblox_config.full_clean() + self.assertIn("infoblox_sync_filters", failure_exception.exception.error_dict) + self.assertEqual(failure_exception.exception.messages[0], "Sync filter must have `network_view` key defined.") + + def test_infoblox_sync_filters_network_view_invalid_type(self): + """Key `network_view` must be a string.""" + inf_dict = deepcopy(self.infoblox_config_dict) + inf_dict["infoblox_sync_filters"] = [{"network_view": []}] + infoblox_config = SSOTInfobloxConfig(**inf_dict) + with self.assertRaises(ValidationError) as failure_exception: + infoblox_config.full_clean() + self.assertIn("infoblox_sync_filters", failure_exception.exception.error_dict) + self.assertEqual(failure_exception.exception.messages[0], "Value of the `network_view` key must be a string.") + + def test_infoblox_sync_filters_duplicate_network_view(self): + """Duplicate values for `network_view` are not allowed.""" + inf_dict = deepcopy(self.infoblox_config_dict) + inf_dict["infoblox_sync_filters"] = [{"network_view": "dev"}, {"network_view": "dev"}] + infoblox_config = SSOTInfobloxConfig(**inf_dict) + with self.assertRaises(ValidationError) as failure_exception: + infoblox_config.full_clean() + self.assertIn("infoblox_sync_filters", failure_exception.exception.error_dict) + self.assertEqual(failure_exception.exception.messages[0], "Duplicate value for the `network_view` found: dev.") + + def test_infoblox_sync_filters_prefixes_ipv4_must_be_list(self): + """Value of `prefixes_ipv4` key must be a 
list.""" + inf_dict = deepcopy(self.infoblox_config_dict) + inf_dict["infoblox_sync_filters"] = [{"network_view": "dev", "prefixes_ipv4": ""}] + infoblox_config = SSOTInfobloxConfig(**inf_dict) + with self.assertRaises(ValidationError) as failure_exception: + infoblox_config.full_clean() + self.assertIn("infoblox_sync_filters", failure_exception.exception.error_dict) + self.assertEqual(failure_exception.exception.messages[0], "Value of the `prefixes_ipv4` key must be a list.") + + def test_infoblox_sync_filters_prefixes_ipv4_must_not_be_an_empty_list(self): + """Value of `prefixes_ipv4` key must not be an empty list.""" + inf_dict = deepcopy(self.infoblox_config_dict) + inf_dict["infoblox_sync_filters"] = [{"network_view": "dev", "prefixes_ipv4": []}] + infoblox_config = SSOTInfobloxConfig(**inf_dict) + with self.assertRaises(ValidationError) as failure_exception: + infoblox_config.full_clean() + self.assertIn("infoblox_sync_filters", failure_exception.exception.error_dict) + self.assertEqual( + failure_exception.exception.messages[0], "Value of the `prefixes_ipv4` key must not be an empty list." + ) + + def test_infoblox_sync_filters_prefixes_ipv4_must_have_prefix_length(self): + """Prefix in `prefixes_ipv4` must have prefix length defined.""" + inf_dict = deepcopy(self.infoblox_config_dict) + inf_dict["infoblox_sync_filters"] = [{"network_view": "dev", "prefixes_ipv4": ["10.0.0.0"]}] + infoblox_config = SSOTInfobloxConfig(**inf_dict) + with self.assertRaises(ValidationError) as failure_exception: + infoblox_config.full_clean() + self.assertIn("infoblox_sync_filters", failure_exception.exception.error_dict) + self.assertEqual( + failure_exception.exception.messages[0], + "IPv4 prefix must have a prefix length defined using `/` format: 10.0.0.0.", + ) + + def test_infoblox_sync_filters_prefixes_ipv4_must_be_valid_prefix(self): + """Prefix in `prefixes_ipv4` must be valid.""" + inf_dict = deepcopy(self.infoblox_config_dict) + inf_dict["infoblox_sync_filters"] = [{"network_view": "dev", "prefixes_ipv4": ["10.0.0/24"]}] + infoblox_config = SSOTInfobloxConfig(**inf_dict) + with self.assertRaises(ValidationError) as failure_exception: + infoblox_config.full_clean() + self.assertIn("infoblox_sync_filters", failure_exception.exception.error_dict) + self.assertIn("IPv4 prefix parsing error", failure_exception.exception.messages[0]) + + def test_infoblox_sync_filters_prefixes_ipv6_must_be_list(self): + """Value of `prefixes_ipv6` key must be a list.""" + inf_dict = deepcopy(self.infoblox_config_dict) + inf_dict["infoblox_sync_filters"] = [{"network_view": "dev", "prefixes_ipv6": ""}] + infoblox_config = SSOTInfobloxConfig(**inf_dict) + with self.assertRaises(ValidationError) as failure_exception: + infoblox_config.full_clean() + self.assertIn("infoblox_sync_filters", failure_exception.exception.error_dict) + self.assertEqual(failure_exception.exception.messages[0], "Value of the `prefixes_ipv6` key must be a list.") + + def test_infoblox_sync_filters_prefixes_ipv6_must_not_be_an_empty_list(self): + """Value of `prefixes_ipv6` key must not be an empty list.""" + inf_dict = deepcopy(self.infoblox_config_dict) + inf_dict["infoblox_sync_filters"] = [{"network_view": "dev", "prefixes_ipv6": []}] + infoblox_config = SSOTInfobloxConfig(**inf_dict) + with self.assertRaises(ValidationError) as failure_exception: + infoblox_config.full_clean() + self.assertIn("infoblox_sync_filters", failure_exception.exception.error_dict) + self.assertEqual( + failure_exception.exception.messages[0], "Value of the 
`prefixes_ipv6` key must not be an empty list." + ) + + def test_infoblox_sync_filters_prefixes_ipv6_must_have_prefix_length(self): + """Prefix in `prefixes_ipv6` must have prefix length defined.""" + inf_dict = deepcopy(self.infoblox_config_dict) + inf_dict["infoblox_sync_filters"] = [{"network_view": "dev", "prefixes_ipv6": ["2001:5b0:4100::"]}] + infoblox_config = SSOTInfobloxConfig(**inf_dict) + with self.assertRaises(ValidationError) as failure_exception: + infoblox_config.full_clean() + self.assertIn("infoblox_sync_filters", failure_exception.exception.error_dict) + self.assertEqual( + failure_exception.exception.messages[0], + "IPv6 prefix must have a prefix length defined using `/` format: 2001:5b0:4100::.", + ) + + def test_infoblox_sync_filters_prefixes_ipv6_must_be_valid_prefix(self): + """Prefix in `prefixes_ipv6` must be valid.""" + inf_dict = deepcopy(self.infoblox_config_dict) + inf_dict["infoblox_sync_filters"] = [{"network_view": "dev", "prefixes_ipv6": ["2001::5b0:4100::/40"]}] + infoblox_config = SSOTInfobloxConfig(**inf_dict) + with self.assertRaises(ValidationError) as failure_exception: + infoblox_config.full_clean() + self.assertIn("infoblox_sync_filters", failure_exception.exception.error_dict) + self.assertIn("IPv6 prefix parsing error", failure_exception.exception.messages[0]) + + def test_infoblox_instance_must_have_secrets_group(self): + """External integration for Infoblox instance must have secrets group assigned.""" + inf_dict = deepcopy(self.infoblox_config_dict) + inf_dict["infoblox_instance"].secrets_group = None + infoblox_config = SSOTInfobloxConfig(**inf_dict) + with self.assertRaises(ValidationError) as failure_exception: + infoblox_config.full_clean() + self.assertIn("infoblox_instance", failure_exception.exception.error_dict) + self.assertEqual( + failure_exception.exception.messages[0], "Infoblox instance must have Secrets groups assigned." 
+ ) + + def test_infoblox_instance_must_have_secrets_rest_username(self): + """Secrets associated with secret group used by Infoblox Instance must be of correct type.""" + inf_dict = deepcopy(self.infoblox_config_dict) + infoblox_config = SSOTInfobloxConfig(**inf_dict) + self.sg_username.secret_type = SecretsGroupSecretTypeChoices.TYPE_TOKEN + self.sg_username.save() + with self.assertRaises(ValidationError) as failure_exception: + infoblox_config.full_clean() + self.assertIn("infoblox_instance", failure_exception.exception.error_dict) + self.assertEqual( + failure_exception.exception.messages[0], + "Secrets group for the Infoblox instance must have secret with type Username and access type REST.", + ) + self.sg_username.secret_type = SecretsGroupSecretTypeChoices.TYPE_USERNAME + self.sg_username.save() + self.sg_password.access_type = SecretsGroupAccessTypeChoices.TYPE_CONSOLE + self.sg_password.save() + with self.assertRaises(ValidationError) as failure_exception: + infoblox_config.full_clean() + self.assertIn("infoblox_instance", failure_exception.exception.error_dict) + self.assertEqual( + failure_exception.exception.messages[0], + "Secrets group for the Infoblox instance must have secret with type Password and access type REST.", + ) + self.sg_password.access_type = SecretsGroupAccessTypeChoices.TYPE_REST + self.sg_password.save() + + def test_infoblox_import_ip_at_least_one_chosen(self): + """At least one of `import_ipv4` or `import_ipv6` must be selected.""" + inf_dict = deepcopy(self.infoblox_config_dict) + inf_dict["import_ipv4"] = False + inf_dict["import_ipv6"] = False + infoblox_config = SSOTInfobloxConfig(**inf_dict) + with self.assertRaises(ValidationError) as failure_exception: + infoblox_config.full_clean() + self.assertIn("import_ipv4", failure_exception.exception.error_dict) + self.assertIn("import_ipv6", failure_exception.exception.error_dict) + self.assertEqual( + failure_exception.exception.error_dict["import_ipv4"][0].message, + "At least one of `import_ipv4` or `import_ipv6` must be set to True.", + ) + self.assertEqual( + failure_exception.exception.error_dict["import_ipv6"][0].message, + "At least one of `import_ipv4` or `import_ipv6` must be set to True.", + ) + + def test_infoblox_incompatible_ip_address_create_options(self): + """Only one of `create_a_record` or `create_host_record` can be enabled at any given time. + `create_ptr` cannot be used with `create_host_record`. 
+        """
+        inf_dict = deepcopy(self.infoblox_config_dict)
+        inf_dict["create_a_record"] = True
+        inf_dict["create_host_record"] = True
+        infoblox_config = SSOTInfobloxConfig(**inf_dict)
+        with self.assertRaises(ValidationError) as failure_exception:
+            infoblox_config.full_clean()
+        self.assertIn("create_a_record", failure_exception.exception.error_dict)
+        self.assertIn("create_host_record", failure_exception.exception.error_dict)
+        self.assertEqual(
+            failure_exception.exception.error_dict["create_a_record"][0].message,
+            "Only one of `create_a_record` or `create_host_record` can be enabled at the same time.",
+        )
+        self.assertEqual(
+            failure_exception.exception.error_dict["create_host_record"][0].message,
+            "Only one of `create_a_record` or `create_host_record` can be enabled at the same time.",
+        )
+
+        inf_dict["create_a_record"] = False
+        inf_dict["create_ptr_record"] = True
+        inf_dict["create_host_record"] = True
+        infoblox_config = SSOTInfobloxConfig(**inf_dict)
+        with self.assertRaises(ValidationError) as failure_exception:
+            infoblox_config.full_clean()
+        self.assertIn("create_host_record", failure_exception.exception.error_dict)
+        self.assertIn("create_ptr_record", failure_exception.exception.error_dict)
+        self.assertEqual(
+            failure_exception.exception.error_dict["create_host_record"][0].message,
+            "`create_ptr_record` can be used with `create_a_record` only.",
+        )
+        self.assertEqual(
+            failure_exception.exception.error_dict["create_ptr_record"][0].message,
+            "`create_ptr_record` can be used with `create_a_record` only.",
+        )
+
+    def test_infoblox_ptr_record_requires_a_record(self):
+        """Using `create_ptr_record` requires `create_a_record` to be enabled."""
+        inf_dict = deepcopy(self.infoblox_config_dict)
+        inf_dict["create_host_record"] = False
+        inf_dict["create_a_record"] = False
+        inf_dict["create_ptr_record"] = True
+        infoblox_config = SSOTInfobloxConfig(**inf_dict)
+        with self.assertRaises(ValidationError) as failure_exception:
+            infoblox_config.full_clean()
+        self.assertIn("create_ptr_record", failure_exception.exception.error_dict)
+        self.assertEqual(
+            failure_exception.exception.messages[0], "To use `create_ptr_record` you must enable `create_a_record`."
+ ) + + def test_infoblox_at_least_one_of_a_or_host_record_required(self): + """At least one of `create_a_record` or `create_host_record` must be selected.""" + inf_dict = deepcopy(self.infoblox_config_dict) + inf_dict["create_a_record"] = False + inf_dict["create_host_record"] = False + infoblox_config = SSOTInfobloxConfig(**inf_dict) + with self.assertRaises(ValidationError) as failure_exception: + infoblox_config.full_clean() + self.assertIn("create_a_record", failure_exception.exception.error_dict) + self.assertIn("create_host_record", failure_exception.exception.error_dict) + self.assertEqual( + failure_exception.exception.error_dict["create_a_record"][0].message, + "Either `create_a_record` or `create_host_record` must be enabled.", + ) + self.assertEqual( + failure_exception.exception.error_dict["create_host_record"][0].message, + "Either `create_a_record` or `create_host_record` must be enabled.", + ) + + def test_infoblox_infoblox_dns_view_mapping_must_be_dict(self): + """Value of `infoblox_dns_view_mapping` key must be a dict.""" + inf_dict = deepcopy(self.infoblox_config_dict) + inf_dict["infoblox_dns_view_mapping"] = [] + infoblox_config = SSOTInfobloxConfig(**inf_dict) + with self.assertRaises(ValidationError) as failure_exception: + infoblox_config.full_clean() + self.assertIn("infoblox_dns_view_mapping", failure_exception.exception.error_dict) + self.assertEqual( + failure_exception.exception.messages[0], + "`infoblox_dns_view_mapping` must be a dictionary mapping network view names to dns view names.", + ) + + def test_infoblox_infoblox_cf_fields_ignore_must_be_dict(self): + """Value of `cf_fields_ignore` key must be a dict.""" + inf_dict = deepcopy(self.infoblox_config_dict) + inf_dict["cf_fields_ignore"] = [] + infoblox_config = SSOTInfobloxConfig(**inf_dict) + with self.assertRaises(ValidationError) as failure_exception: + infoblox_config.full_clean() + self.assertIn("cf_fields_ignore", failure_exception.exception.error_dict) + self.assertEqual(failure_exception.exception.messages[0], "`cf_fields_ignore` must be a dictionary.") + + def test_infoblox_infoblox_cf_fields_key_names_must_be_valid(self): + """Only `extensible_attributes` and `custom_fields` keys are allowed in `cf_fields_ignore`.""" + inf_dict = deepcopy(self.infoblox_config_dict) + inf_dict["cf_fields_ignore"] = {"fields": []} + infoblox_config = SSOTInfobloxConfig(**inf_dict) + with self.assertRaises(ValidationError) as failure_exception: + infoblox_config.full_clean() + self.assertIn("cf_fields_ignore", failure_exception.exception.error_dict) + self.assertEqual( + failure_exception.exception.messages[0], + "Invalid key name `fields`. Only `extensible_attributes` and `custom_fields` are allowed.", + ) + + def test_infoblox_infoblox_cf_fields_values_must_be_list_of_string(self): + """`infoblox_cf_fields` key values must be list of strings.""" + inf_dict = deepcopy(self.infoblox_config_dict) + inf_dict["cf_fields_ignore"] = {"extensible_attributes": ["ea1", 2]} + infoblox_config = SSOTInfobloxConfig(**inf_dict) + with self.assertRaises(ValidationError) as failure_exception: + infoblox_config.full_clean() + self.assertIn("cf_fields_ignore", failure_exception.exception.error_dict) + self.assertEqual( + failure_exception.exception.messages[0], "Value of key `extensible_attributes` must be a list of strings." 
+ ) + + inf_dict["cf_fields_ignore"] = {"custom_fields": ["cf1", 2]} + infoblox_config = SSOTInfobloxConfig(**inf_dict) + with self.assertRaises(ValidationError) as failure_exception: + infoblox_config.full_clean() + self.assertIn("cf_fields_ignore", failure_exception.exception.error_dict) + self.assertEqual( + failure_exception.exception.messages[0], "Value of key `custom_fields` must be a list of strings." + ) From c1b36dfc63e9a09982881183473426b60f67c8f9 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Wed, 22 May 2024 17:25:24 +0100 Subject: [PATCH 080/229] Update heading in edit template. --- .../nautobot_ssot_infoblox/ssotinfobloxconfig_update.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_update.html b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_update.html index 225171dee..afd16457f 100644 --- a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_update.html +++ b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_update.html @@ -3,7 +3,7 @@ {% block form %}
-Config Context
+Infoblox Config
    {% render_field form.name %} {% render_field form.description %} From a8db9122d3af16f8c7ac4c170de701481b571dbd Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Wed, 22 May 2024 17:40:50 +0100 Subject: [PATCH 081/229] Fix ipv6 bools in adapters. Remove unused template. --- .../infoblox/diffsync/adapters/infoblox.py | 2 +- .../infoblox/diffsync/adapters/nautobot.py | 2 +- .../ssotinfobloxconfig_changelog.html | 105 ------------------ 3 files changed, 2 insertions(+), 107 deletions(-) delete mode 100644 nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_changelog.html diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py index b97a740fd..603036a15 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py @@ -285,7 +285,7 @@ def load_vlans(self): def load(self): """Load all models by calling other methods.""" include_ipv4 = self.config.import_ipv4 - include_ipv6 = self.config.import_ipv4 + include_ipv6 = self.config.import_ipv6 sync_filters = self.config.infoblox_sync_filters self.load_network_views(sync_filters=sync_filters) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py index f926c81ab..c80719c03 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py @@ -361,7 +361,7 @@ def load_vlans(self): def load(self): """Load models with data from Nautobot.""" include_ipv4 = self.config.import_ipv4 - include_ipv6 = self.config.import_ipv4 + include_ipv6 = self.config.import_ipv6 sync_filters = self.config.infoblox_sync_filters self.relationship_map = {r.label: r.id for r in Relationship.objects.only("id", "label")} diff --git a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_changelog.html b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_changelog.html deleted file mode 100644 index b51955b0b..000000000 --- a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_changelog.html +++ /dev/null @@ -1,105 +0,0 @@ -{% extends 'generic/object_retrieve.html' %} -{% load helpers %} -{% load buttons %} - -{% block breadcrumbs %} -
-    [deleted template body: breadcrumbs (SSOT Configs / SSOT Infoblox Configs / {{ object|hyperlinked_object }}); masthead with the object title; an "Infoblox Config" panel listing Name, Description, Infoblox Instance, Default Status for Imported Objects, Infoblox WAPI Version, Enable Sync from Nautobot to Infoblox, Import IP Addresses, Import VLANs, Import VLAN Views, Import IPv4, Import IPv6, and Can be used in Sync Job; two JSON panels rendering infoblox_sync_filters and infoblox_dns_view_mapping]
    - -{% endblock %} \ No newline at end of file From aafd50b440f85191b1c53e9b75015d448b0e23bc Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 23 May 2024 11:40:45 +0100 Subject: [PATCH 082/229] Remove debug msg. --- nautobot_ssot/integrations/infoblox/jobs.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nautobot_ssot/integrations/infoblox/jobs.py b/nautobot_ssot/integrations/infoblox/jobs.py index 4c011a8bb..386b11f39 100644 --- a/nautobot_ssot/integrations/infoblox/jobs.py +++ b/nautobot_ssot/integrations/infoblox/jobs.py @@ -81,7 +81,6 @@ def load_source_adapter(self): """Load Infoblox data.""" self.logger.info("Connecting to Infoblox") client_config = _get_infoblox_client_config(self.config, self.debug) - self.logger.info(client_config) client = InfobloxApi(**client_config) self.source_adapter = infoblox.InfobloxAdapter(job=self, sync=self.sync, conn=client, config=self.config) self.logger.info("Loading data from Infoblox...") From ae9b55b22254e66223c3c8c084c3ba25ba923e5f Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 23 May 2024 11:46:05 +0100 Subject: [PATCH 083/229] Remove debug msg. --- nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py index 7bac0a1aa..e10190016 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py @@ -91,7 +91,6 @@ def create(cls, diffsync, ids, attrs): network_view = map_network_view_to_namespace(value=ids["namespace"], direction="ns_to_nv") dns_name = attrs.get("dns_name") ip_address = ids["address"] - diffsync.job.logger.warning(f"IP Address {ip_address}. DNS name: {dns_name}.") if not dns_name: diffsync.job.logger.warning( f"Cannot create Infoblox record for IP Address {ip_address}. DNS name is not defined." From 454d2bc18b9d24b19513300f5114011d8c0522e6 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 23 May 2024 15:03:11 +0100 Subject: [PATCH 084/229] Enforce object type sync selection. 
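Previously the Nautobot adapter loaded every object type regardless of the per-config import toggles, and a sync filter carrying only a `network_view` key ignored the IPv4/IPv6 selection. `load()` now skips whole loaders based on `import_subnets`, `import_ip_addresses`, `import_vlan_views` and `import_vlans`, and the namespace-only filter branch narrows the queryset by address family. Roughly, for a filter like {"network_view": "dev"} (lookup key illustrative):

    include_ipv4, include_ipv6 = True, False  # from config.import_ipv4 / config.import_ipv6
    query_filters = {"namespace__name": "dev"}
    if include_ipv4 and not include_ipv6:
        query_filters["ip_version"] = 4
    elif include_ipv6 and not include_ipv4:
        query_filters["ip_version"] = 6
    # with both families enabled no ip_version constraint is added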
--- .../infoblox/diffsync/adapters/nautobot.py | 20 +++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py index c80719c03..64d658246 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py @@ -199,6 +199,10 @@ def _load_all_prefixes_filtered(self, sync_filters: list, include_ipv4: bool, in all_prefixes = all_prefixes.union(Prefix.objects.filter(**query_filters)) # Filter on namespace name only if "prefixes_ipv4" not in sync_filter and "prefixes_ipv6" not in sync_filter: + if include_ipv4 and not include_ipv6: + query_filters["ip_version"] = 4 + elif include_ipv6 and not include_ipv4: + query_filters["ip_version"] = 6 all_prefixes = all_prefixes.union(Prefix.objects.filter(**query_filters)) return all_prefixes @@ -258,6 +262,10 @@ def _load_all_ipaddresses_filtered(self, sync_filters: list, include_ipv4: bool, all_ipaddresses = all_ipaddresses.union(IPAddress.objects.filter(**query_filters)) # Filter on namespace name only if "prefixes_ipv4" not in sync_filter and "prefixes_ipv6" not in sync_filter: + if include_ipv4 and not include_ipv6: + query_filters["ip_version"] = 4 + elif include_ipv6 and not include_ipv4: + query_filters["ip_version"] = 6 all_ipaddresses = all_ipaddresses.union(IPAddress.objects.filter(**query_filters)) return all_ipaddresses @@ -372,15 +380,19 @@ def load(self): self.load_namespaces(sync_filters=sync_filters) if "namespace" in self.dict(): self.job.logger.info(f"Loaded {len(self.dict()['namespace'])} Namespaces from Nautobot.") - self.load_prefixes(sync_filters=sync_filters, include_ipv4=include_ipv4, include_ipv6=include_ipv6) + if self.config.import_subnets: + self.load_prefixes(sync_filters=sync_filters, include_ipv4=include_ipv4, include_ipv6=include_ipv6) if "prefix" in self.dict(): self.job.logger.info(f"Loaded {len(self.dict()['prefix'])} prefixes from Nautobot.") - self.load_ipaddresses(sync_filters=sync_filters, include_ipv4=include_ipv4, include_ipv6=include_ipv6) + if self.config.import_ip_addresses: + self.load_ipaddresses(sync_filters=sync_filters, include_ipv4=include_ipv4, include_ipv6=include_ipv6) if "ipaddress" in self.dict(): self.job.logger.info(f"Loaded {len(self.dict()['ipaddress'])} IP addresses from Nautobot.") - self.load_vlangroups() + if self.config.import_vlan_views: + self.load_vlangroups() if "vlangroup" in self.dict(): self.job.logger.info(f"Loaded {len(self.dict()['vlangroup'])} VLAN Groups from Nautobot.") - self.load_vlans() + if self.config.import_vlans: + self.load_vlans() if "vlan" in self.dict(): self.job.logger.info(f"Loaded {len(self.dict()['vlan'])} VLANs from Nautobot.") From ee145b86026434f12eef2182d4a8f53236131e06 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 23 May 2024 15:03:34 +0100 Subject: [PATCH 085/229] Create status Active if it does not exist. 
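`Status.objects.get(name="Active")` raises `Status.DoesNotExist` on a database where the default statuses were never seeded, which aborted the whole database-ready callback. `get_or_create` keeps the callback working either way:

    # Equivalent to the try/except it replaces:
    try:
        default_status = Status.objects.get(name="Active")
    except Status.DoesNotExist:
        default_status = Status.objects.create(name="Active")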
--- nautobot_ssot/integrations/infoblox/signals.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/integrations/infoblox/signals.py b/nautobot_ssot/integrations/infoblox/signals.py index 4c1168f88..ea20dd33a 100644 --- a/nautobot_ssot/integrations/infoblox/signals.py +++ b/nautobot_ssot/integrations/infoblox/signals.py @@ -101,7 +101,7 @@ def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disa if found_status.exists(): default_status = found_status.first() else: - default_status = Status.objects.get(name="Active") + default_status, _ = Status.objects.get_or_create(name="Active") try: infoblox_request_timeout = int(config.get("infoblox_request_timeout", 60)) From 209aa6556255fa605b316dd503beebe5031707c3 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 23 May 2024 15:04:13 +0100 Subject: [PATCH 086/229] Add IP Address tests. Add IPv6 tests. --- .../tests/infoblox/test_nautobot_adapter.py | 190 +++++++++++++++++- 1 file changed, 186 insertions(+), 4 deletions(-) diff --git a/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py b/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py index 50e0b5a37..650e89183 100644 --- a/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py +++ b/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py @@ -4,7 +4,7 @@ from django.test import TestCase from nautobot.extras.models import RelationshipAssociation, Status -from nautobot.ipam.models import Namespace, Prefix, VLAN, VLANGroup +from nautobot.ipam.models import IPAddress, Namespace, Prefix, VLAN, VLANGroup from nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot import NautobotAdapter from nautobot_ssot.tests.infoblox.fixtures_infoblox import create_default_infoblox_config, create_prefix_relationship @@ -48,6 +48,7 @@ def setUp(self): vlan_group=vlan_group2, ) namespace_dev, _ = Namespace.objects.get_or_create(name="dev") + namespace_test, _ = Namespace.objects.get_or_create(name="test") prefix1 = Prefix.objects.create( prefix="10.0.0.0/24", status=active_status, @@ -62,23 +63,96 @@ def setUp(self): ) prefix1.cf["dhcp_ranges"] = "10.0.0.50-10.0.0.254" prefix1.save() - Prefix.objects.create( + prefix2 = Prefix.objects.create( prefix="10.0.1.0/24", status=active_status, type="Network", ) - Prefix.objects.create( + prefix3 = Prefix.objects.create( prefix="10.0.1.0/24", status=active_status, type="Network", namespace=namespace_dev, ) - Prefix.objects.create( + prefix4 = Prefix.objects.create( prefix="10.2.1.0/24", status=active_status, type="Network", namespace=namespace_dev, ) + prefix5 = Prefix.objects.create( + prefix="10.2.1.0/25", + status=active_status, + type="Network", + namespace=namespace_test, + ) + prefix6 = Prefix.objects.create( + prefix="10.5.1.0/25", + status=active_status, + type="Network", + namespace=namespace_test, + ) + ipv6prefix1 = Prefix.objects.create( + prefix="2001:5b0:4100::/48", + status=active_status, + type="Network", + ) + IPAddress.objects.create( + description="Test IPAddress 1", + address="10.0.1.1/24", + status=active_status, + type="host", + dns_name="server1.nautobot.test.com", + parent_id=prefix2.id, + ) + IPAddress.objects.create( + description="Test IPAddress 2", + address="10.0.1.2/24", + status=active_status, + type="host", + dns_name="server2.nautobot.test.com", + parent_id=prefix2.id, + ) + IPAddress.objects.create( + description="Test IPAddress 3", + address="10.0.1.1/24", + status=active_status, + type="host", + dns_name="server10.nautobot.test.com", + parent_id=prefix3.id, + ) + 
IPAddress.objects.create( + description="Test IPAddress 4", + address="10.2.1.1/24", + status=active_status, + type="host", + dns_name="server11.nautobot.test.com", + parent_id=prefix4.id, + ) + IPAddress.objects.create( + description="Test IPAddress 5", + address="10.2.1.10/25", + status=active_status, + type="host", + dns_name="server20.nautobot.test.com", + parent_id=prefix5.id, + ) + IPAddress.objects.create( + description="Test IPAddress 6", + address="10.5.1.5/25", + status=active_status, + type="host", + dns_name="server21.nautobot.test.com", + parent_id=prefix6.id, + ) + IPAddress.objects.create( + description="Test IPv6Address 1", + address="2001:5b0:4100::1/48", + status=active_status, + type="host", + dns_name="v6server1.nautobot.test.com", + parent_id=ipv6prefix1.id, + ) self.config = create_default_infoblox_config() self.sync_filters = self.config.infoblox_sync_filters self.nb_adapter = NautobotAdapter(config=self.config) @@ -119,6 +193,46 @@ def test_load_prefixes_loads_prefixes_dev_namespace_ipv4_filter(self): }, ) + def test_load_prefixes_loads_prefixes_multiple_filters(self): + sync_filters = [ + {"network_view": "dev", "prefixes_ipv4": ["10.0.0.0/16"]}, + {"network_view": "test", "prefixes_ipv4": ["10.0.0.0/8"]}, + ] + self.nb_adapter.load_prefixes(include_ipv4=True, include_ipv6=False, sync_filters=sync_filters) + actual_prefixes = {(prefix.network, prefix.namespace) for prefix in self.nb_adapter.get_all("prefix")} + self.assertEqual( + actual_prefixes, + { + ("10.0.1.0/24", "dev"), + ("10.2.1.0/25", "test"), + ("10.5.1.0/25", "test"), + }, + ) + + def test_load_prefixes_loads_prefixes_ipv6(self): + sync_filters = [{"network_view": "default"}] + self.nb_adapter.load_prefixes(include_ipv4=False, include_ipv6=True, sync_filters=sync_filters) + actual_prefixes = {(prefix.network, prefix.namespace) for prefix in self.nb_adapter.get_all("prefix")} + self.assertEqual( + actual_prefixes, + { + ("2001:5b0:4100::/48", "Global"), + }, + ) + + def test_load_prefixes_loads_prefixes_ipv4_and_ipv6(self): + sync_filters = [{"network_view": "default"}] + self.nb_adapter.load_prefixes(include_ipv4=True, include_ipv6=True, sync_filters=sync_filters) + actual_prefixes = {(prefix.network, prefix.namespace) for prefix in self.nb_adapter.get_all("prefix")} + self.assertEqual( + actual_prefixes, + { + ("10.0.0.0/24", "Global"), + ("10.0.1.0/24", "Global"), + ("2001:5b0:4100::/48", "Global"), + }, + ) + def test_load_prefixes_loads_prefixes_and_vlan_relationship(self): self.nb_adapter.load_prefixes(include_ipv4=True, include_ipv6=False, sync_filters=self.sync_filters) prefix_with_vlan = self.nb_adapter.get("prefix", {"network": "10.0.0.0/24", "namespace": "Global"}) @@ -128,3 +242,71 @@ def test_load_prefixes_loads_ranges(self): self.nb_adapter.load_prefixes(include_ipv4=True, include_ipv6=False, sync_filters=self.sync_filters) prefix_with_ranges = self.nb_adapter.get("prefix", {"network": "10.0.0.0/24", "namespace": "Global"}) self.assertEqual(["10.0.0.50-10.0.0.254"], prefix_with_ranges.ranges) + + def test_load_ipaddresses_loads_ips_default_namespace(self): + sync_filters = [{"network_view": "default"}] + self.nb_adapter.load_ipaddresses(sync_filters=sync_filters, include_ipv4=True, include_ipv6=False) + actual_ipaddresses = {(ipaddr.address, ipaddr.namespace) for ipaddr in self.nb_adapter.get_all("ipaddress")} + self.assertEqual( + actual_ipaddresses, + {("10.0.1.1", "Global"), ("10.0.1.2", "Global")}, + ) + + def test_load_ipaddresses_loads_ips_dev_namespace(self): + sync_filters = 
[{"network_view": "dev"}] + self.nb_adapter.load_ipaddresses(include_ipv4=True, include_ipv6=False, sync_filters=sync_filters) + actual_ipaddresses = {(ipaddr.address, ipaddr.namespace) for ipaddr in self.nb_adapter.get_all("ipaddress")} + self.assertEqual( + actual_ipaddresses, + {("10.0.1.1", "dev"), ("10.2.1.1", "dev")}, + ) + + def test_load_ipaddresses_loads_ips_dev_namespace_filtered(self): + sync_filters = [{"network_view": "dev", "prefixes_ipv4": ["10.0.1.0/24"]}] + self.nb_adapter.load_ipaddresses(include_ipv4=True, include_ipv6=False, sync_filters=sync_filters) + actual_ipaddresses = {(ipaddr.address, ipaddr.namespace) for ipaddr in self.nb_adapter.get_all("ipaddress")} + self.assertEqual( + actual_ipaddresses, + { + ("10.0.1.1", "dev"), + }, + ) + + def test_load_ipaddresses_loads_ips_multiple_filters(self): + sync_filters = [ + {"network_view": "dev", "prefixes_ipv4": ["10.0.0.0/16"]}, + {"network_view": "test", "prefixes_ipv4": ["10.5.0.0/16"]}, + ] + self.nb_adapter.load_ipaddresses(include_ipv4=True, include_ipv6=False, sync_filters=sync_filters) + actual_ipaddresses = {(ipaddr.address, ipaddr.namespace) for ipaddr in self.nb_adapter.get_all("ipaddress")} + self.assertEqual( + actual_ipaddresses, + { + ("10.0.1.1", "dev"), + ("10.5.1.5", "test"), + }, + ) + + def test_load_ipaddresses_loads_ips_ipv6(self): + sync_filters = [{"network_view": "default"}] + self.nb_adapter.load_ipaddresses(include_ipv4=False, include_ipv6=True, sync_filters=sync_filters) + actual_ipaddresses = {(ipaddr.address, ipaddr.namespace) for ipaddr in self.nb_adapter.get_all("ipaddress")} + self.assertEqual( + actual_ipaddresses, + { + ("2001:5b0:4100::1", "Global"), + }, + ) + + def test_load_ipaddresses_loads_ips_ipv4_and_ipv6(self): + sync_filters = [{"network_view": "default"}] + self.nb_adapter.load_ipaddresses(include_ipv4=True, include_ipv6=True, sync_filters=sync_filters) + actual_ipaddresses = {(ipaddr.address, ipaddr.namespace) for ipaddr in self.nb_adapter.get_all("ipaddress")} + self.assertEqual( + actual_ipaddresses, + { + ("10.0.1.1", "Global"), + ("10.0.1.2", "Global"), + ("2001:5b0:4100::1", "Global"), + }, + ) From 0572feb39e04d91ea51b4778bf30122f53094110 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Fri, 24 May 2024 18:12:40 +0100 Subject: [PATCH 087/229] Add support for fixed address (mac and reservation). 
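Infoblox models DHCP assignments as `fixedaddress` objects whose `match_client` field controls how a lease is matched. This change wires two of those variants through both adapters: a fixed address matched on a MAC address, and a reservation with no MAC attached. On the Nautobot side the behaviour is driven by the new `fixed_address_type` setting; a sketch of the intended gating, where `config` stands for the active SSOTInfobloxConfig and `mac_address` for the value of the IP address's MAC custom field:

    from nautobot_ssot.integrations.infoblox.choices import FixedAddressTypeChoices

    def wants_fixed_address(config, mac_address):
        # DHCP IPs become Infoblox fixed addresses only when the config asks for it.
        if config.fixed_address_type == FixedAddressTypeChoices.MAC_ADDRESS:
            return bool(mac_address)  # MAC match needs a MAC to match on
        if config.fixed_address_type == FixedAddressTypeChoices.RESERVED:
            return True  # reservations need no MAC
        return False  # FixedAddressTypeChoices.DONT_CREATE_RECORD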
---
 .../integrations/infoblox/choices.py          |  44 ++++
 .../infoblox/diffsync/adapters/infoblox.py    |  25 ++-
 .../infoblox/diffsync/adapters/nautobot.py    |  29 ++-
 .../infoblox/diffsync/models/base.py          |   6 +
 .../infoblox/diffsync/models/infoblox.py      | 192 +++++++++++++++---
 nautobot_ssot/integrations/infoblox/forms.py  |  15 +-
 nautobot_ssot/integrations/infoblox/jobs.py   |   3 -
 nautobot_ssot/integrations/infoblox/models.py |  30 ++-
 .../integrations/infoblox/signals.py          |   1 +
 .../integrations/infoblox/utils/client.py     |  67 +++++-
 .../integrations/infoblox/utils/diffsync.py   |   2 +-
 .../ssotinfobloxconfig_changelog.html         |   1 +
 .../ssotinfobloxconfig_retrieve.html          |  16 +-
 .../ssotinfobloxconfig_update.html            |   3 +
 .../tests/infoblox/test_infoblox_models.py    |  83 +++++---
 15 files changed, 433 insertions(+), 84 deletions(-)
 create mode 100644 nautobot_ssot/integrations/infoblox/choices.py
 create mode 100644 nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_changelog.html

diff --git a/nautobot_ssot/integrations/infoblox/choices.py b/nautobot_ssot/integrations/infoblox/choices.py
new file mode 100644
index 000000000..c6a670c8f
--- /dev/null
+++ b/nautobot_ssot/integrations/infoblox/choices.py
@@ -0,0 +1,44 @@
+"""Choicesets for Infoblox integration."""
+
+from nautobot.apps.choices import ChoiceSet
+
+
+class FixedAddressTypeChoices(ChoiceSet):
+    """Choiceset used by SSOTInfobloxConfig.
+
+    Infoblox supports the below values for `match_client` field in the `fixed_address` object:
+
+    CIRCUIT_ID
+    CLIENT_ID
+    MAC_ADDRESS
+    REMOTE_ID
+    RESERVED
+
+    We currently support creation of MAC_ADDRESS and RESERVED types only.
+    """
+
+    DONT_CREATE_RECORD = "do-not-create-record"
+    MAC_ADDRESS = "create-fixed-with-mac-address"
+    RESERVED = "create-reservation-no-mac-address"
+
+    CHOICES = (
+        (DONT_CREATE_RECORD, "Do not create fixed address"),
+        (MAC_ADDRESS, "Create record with MAC address"),
+        (RESERVED, "Create reservation with no MAC address"),
+    )
+
+
+class DNSRecordTypeChoices(ChoiceSet):
+    """Choiceset used by SSOTInfobloxConfig."""
+
+    DONT_CREATE_RECORD = "do-not-create-dns-record"
+    HOST_RECORD = "create-host-record"
+    A_RECORD = "create-a-record"
+    A_AND_PTR_RECORD = "create-a-and-ptr-records"
+
+    CHOICES = (
+        (DONT_CREATE_RECORD, "Do not create DNS record"),
+        (HOST_RECORD, "Create Host record"),
+        (A_RECORD, "Create A record"),
+        (A_AND_PTR_RECORD, "Create A and PTR records"),
+    )
diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py
index 603036a15..d6a4ddefd 100644
--- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py
+++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py
@@ -22,6 +22,7 @@
     build_vlan_map,
     get_ext_attr_dict,
     map_network_view_to_namespace,
+    validate_dns_name,
 )
 
 
@@ -205,10 +206,20 @@ def load_ipaddresses(self):
         default_ext_attrs = get_default_ext_attrs(review_list=ipaddrs, excluded_attrs=self.excluded_attrs)
         for _ip in ipaddrs:
             _, prefix_length = _ip["network"].split("/")
+            network_view = _ip["network_view"]
             dns_name = ""
-            if _ip["names"]:
-                dns_name = get_dns_name(possible_fqdn=_ip["names"][0])
-            namespace = map_network_view_to_namespace(value=_ip["network_view"], direction="nv_to_ns")
+            fallback_dns_name = ""
+            # Record can have multiple names; if there is a DNS record attached we should use that name.
+            # Otherwise fall back to the non-DNS name.
+            for dns_name_candidate in _ip["names"]:
+                if validate_dns_name(infoblox_client=self.conn, 
dns_name=dns_name_candidate, network_view=network_view):
+                    dns_name = dns_name_candidate
+                    break
+                if not fallback_dns_name:
+                    fallback_dns_name = get_dns_name(possible_fqdn=dns_name_candidate)
+
+            dns_name = dns_name or fallback_dns_name
+            namespace = map_network_view_to_namespace(value=network_view, direction="nv_to_ns")
 
             ip_ext_attrs = get_ext_attr_dict(extattrs=_ip.get("extattrs", {}), excluded_attrs=self.excluded_attrs)
             new_ip = self.ipaddress(
@@ -221,6 +232,7 @@ def load_ipaddresses(self):
                 ip_addr_type=self.conn.get_ipaddr_type(_ip),
                 description=_ip["comment"],
                 ext_attrs={**default_ext_attrs, **ip_ext_attrs},
+                mac_address=None if not _ip["mac_address"] else _ip["mac_address"],
             )
 
             # Record references to DNS Records linked to this IP Address
@@ -235,6 +247,13 @@ def load_ipaddresses(self):
                 elif obj_type == "record:ptr":
                     new_ip.has_ptr_record = True
                     new_ip.ptr_record_ref = ref
+                elif obj_type == "fixedaddress":
+                    new_ip.has_fixed_address = True
+                    new_ip.fixed_address_ref = ref
+                    if "RESERVATION" in _ip["types"]:
+                        new_ip.fixed_address_type = "RESERVED"
+                    elif "FA" in _ip["types"]:
+                        new_ip.fixed_address_type = "MAC_ADDRESS"
             self.add(new_ip)
 
diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py
index 64d658246..857a8c426 100644
--- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py
+++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py
@@ -11,8 +11,10 @@
 from nautobot.extras.choices import CustomFieldTypeChoices
 from nautobot.extras.models import CustomField, Relationship, Role, Status, Tag
 from nautobot.ipam.models import VLAN, IPAddress, Namespace, Prefix, VLANGroup
+from nautobot.ipam.choices import IPAddressTypeChoices
 from nautobot.tenancy.models import Tenant
 
+from nautobot_ssot.integrations.infoblox.choices import DNSRecordTypeChoices, FixedAddressTypeChoices
 from nautobot_ssot.integrations.infoblox.constant import TAG_COLOR
 from nautobot_ssot.integrations.infoblox.diffsync.models import (
     NautobotIPAddress,
@@ -298,6 +300,10 @@ def load_ipaddresses(self, include_ipv4: bool, include_ipv6: bool, sync_filters:
                 )
                 continue
 
+            mac_address = None
+            if ipaddr.type == IPAddressTypeChoices.TYPE_DHCP:
+                mac_address = ipaddr.custom_field_data.get("mac_address")
+
             custom_fields = get_valid_custom_fields(ipaddr.custom_field_data, excluded_cfs=self.excluded_cfs)
             _ip = self.ipaddress(
                 address=addr,
@@ -309,18 +315,35 @@ def load_ipaddresses(self, include_ipv4: bool, include_ipv6: bool, sync_filters:
                 dns_name=ipaddr.dns_name,
                 description=ipaddr.description,
                 ext_attrs={**default_cfs, **custom_fields},
+                mac_address=mac_address,
                 pk=ipaddr.id,
             )
+
+            # Mark DHCP addresses as fixed addresses once the model instance exists.
+            if ipaddr.type == IPAddressTypeChoices.TYPE_DHCP:
+                if self.config.fixed_address_type == FixedAddressTypeChoices.MAC_ADDRESS and mac_address:
+                    _ip.has_fixed_address = True
+                elif self.config.fixed_address_type == FixedAddressTypeChoices.RESERVED:
+                    _ip.has_fixed_address = True
 
             # Pretend IP Address has matching DNS records if dns name is defined.
             # This will be compared against values set on Infoblox side. 
+
             if ipaddr.dns_name:
-                if self.config.create_host_record:
+                if self.config.dns_record_type == DNSRecordTypeChoices.HOST_RECORD:
                     _ip.has_host_record = True
-                elif self.config.create_a_record:
+                elif self.config.dns_record_type == DNSRecordTypeChoices.A_RECORD:
+                    _ip.has_a_record = True
+                elif self.config.dns_record_type == DNSRecordTypeChoices.A_AND_PTR_RECORD:
                     _ip.has_a_record = True
-                    if self.config.create_ptr_record:
-                        _ip.has_ptr_record = True
+                    _ip.has_ptr_record = True
+
+            # if ipaddr.dns_name:
+            #     if self.config.create_host_record:
+            #         _ip.has_host_record = True
+            #     elif self.config.create_a_record:
+            #         _ip.has_a_record = True
+            #         if self.config.create_ptr_record:
+            #             _ip.has_ptr_record = True
 
             try:
                 self.add(_ip)
diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/base.py b/nautobot_ssot/integrations/infoblox/diffsync/models/base.py
index 1077cafe7..76bb9113b 100644
--- a/nautobot_ssot/integrations/infoblox/diffsync/models/base.py
+++ b/nautobot_ssot/integrations/infoblox/diffsync/models/base.py
@@ -77,6 +77,8 @@ class IPAddress(DiffSyncModel):
         "has_host_record",
         "has_a_record",
         "has_ptr_record",
+        "has_fixed_address",
+        "mac_address",
     )
 
     address: str
@@ -91,8 +93,12 @@ class IPAddress(DiffSyncModel):
     has_a_record: bool = False
     has_host_record: bool = False
     has_ptr_record: bool = False
+    has_fixed_address: bool = False
+    mac_address: Optional[str]
 
     pk: Optional[uuid.UUID] = None
     a_record_ref: Optional[str] = None
     host_record_ref: Optional[str] = None
     ptr_record_ref: Optional[str] = None
+    fixed_address_ref: Optional[str] = None
+    fixed_address_type: Optional[str] = None
diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py
index e10190016..93f4d5fc8 100644
--- a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py
+++ b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py
@@ -1,6 +1,7 @@
 """Infoblox Models for Infoblox integration with SSoT app."""
 from requests.exceptions import HTTPError
 
+from nautobot_ssot.integrations.infoblox.choices import DNSRecordTypeChoices, FixedAddressTypeChoices
 from nautobot_ssot.integrations.infoblox.diffsync.models.base import Namespace, Network, IPAddress, Vlan, VlanView
 from nautobot_ssot.integrations.infoblox.utils.diffsync import map_network_view_to_namespace, validate_dns_name
 
@@ -82,18 +83,58 @@ class InfobloxIPAddress(IPAddress):
 
     @classmethod
    def create(cls, diffsync, ids, attrs):
-        """Create either a Host record or an A record.
+        """Create a fixed address and, optionally, a DNS Host record or an A record.
 
         Optionally creates a PTR record in addition to an A record. 
- This requires the IP Address to have a DNS name + DNS record creation requires the IP Address to have a DNS name """ network_view = map_network_view_to_namespace(value=ids["namespace"], direction="ns_to_nv") dns_name = attrs.get("dns_name") ip_address = ids["address"] + name = attrs.get("description") + mac_address = attrs.get("mac_address") + + if dns_name: + fa_name = dns_name + else: + fa_name = name + + if diffsync.config.fixed_address_type == FixedAddressTypeChoices.RESERVED: + # if diffsync.config.create_ip_reservation: + diffsync.conn.create_fixed_address( + ip_address=ip_address, name=fa_name, match_client="RESERVED", network_view=network_view + ) + diffsync.job.logger.debug( + "Created fixed address reservation, address: %s, name: %s, network_view %s", + ip_address, + fa_name, + network_view, + ) + elif diffsync.config.fixed_address_type == FixedAddressTypeChoices.MAC_ADDRESS and mac_address: + diffsync.conn.create_fixed_address( + ip_address=ip_address, + name=fa_name, + mac_address=mac_address, + match_client="MAC_ADDRESS", + network_view=network_view, + ) + diffsync.job.logger.debug( + "Created fixed address with MAC, address: %s, name: %s, mac address: %s, network_view %s", + ip_address, + fa_name, + mac_address, + network_view, + ) + + # DNS record not needed, we can return + if diffsync.config.dns_record_type == DNSRecordTypeChoices.DONT_CREATE_RECORD: + # if not(diffsync.conn.create_a_record or diffsync.config.create_host_record): + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + if not dns_name: diffsync.job.logger.warning( - f"Cannot create Infoblox record for IP Address {ip_address}. DNS name is not defined." + f"Cannot create Infoblox DNS record for IP Address {ip_address}. DNS name is not defined." ) return super().create(ids=ids, diffsync=diffsync, attrs=attrs) @@ -102,12 +143,17 @@ def create(cls, diffsync, ids, attrs): diffsync.job.logger.warning(f"Invalid zone fqdn in DNS name `{dns_name}` for IP Address {ip_address}") return super().create(ids=ids, diffsync=diffsync, attrs=attrs) - if diffsync.config.create_a_record and attrs.get("has_a_record"): + if diffsync.config.dns_record_type == DNSRecordTypeChoices.A_RECORD: + # if diffsync.config.create_a_record and attrs.get("has_a_record"): diffsync.conn.create_a_record(dns_name, ip_address, network_view=network_view) + elif diffsync.config.dns_record_type == DNSRecordTypeChoices.A_AND_PTR_RECORD: + diffsync.conn.create_a_record(dns_name, ip_address, network_view=network_view) + diffsync.conn.create_ptr_record(dns_name, ip_address, network_view=network_view) # Only create PTR records if A record has been created - if diffsync.config.create_ptr_record and attrs.get("has_ptr_record"): - diffsync.conn.create_ptr_record(dns_name, ip_address, network_view=network_view) - elif diffsync.config.create_host_record and attrs.get("has_host_record"): + # if diffsync.config.create_ptr_record and attrs.get("has_ptr_record"): + # diffsync.conn.create_ptr_record(dns_name, ip_address, network_view=network_view) + elif diffsync.config.dns_record_type == DNSRecordTypeChoices.HOST_RECORD: + # elif diffsync.config.create_host_record and attrs.get("has_host_record"): diffsync.conn.create_host_record(dns_name, ip_address, network_view=network_view) return super().create(ids=ids, diffsync=diffsync, attrs=attrs) @@ -116,35 +162,90 @@ def update(self, attrs): # pylint: disable=too-many-branches ids = self.get_identifiers() inf_attrs = self.get_attrs() ip_address = ids["address"] + new_dns_name = attrs.get("dns_name") + 
description = attrs.get("description") network_view = map_network_view_to_namespace(value=ids["namespace"], direction="ns_to_nv") - payload = {} - if attrs.get("description"): - payload.update({"comment": attrs["description"]}) - if attrs.get("dns_name"): - payload.update({"name": attrs["dns_name"]}) + mac_address = attrs.get("mac_address") + + self.diffsync.job.logger.warning(f"attrs {attrs}, config {self.diffsync.config.dns_record_type}") + fa_update_data = {} + + # new dns_name == "" - dns name erased + # replace it with new description + # if no new description - use existing description + + # DNS name updated in Nautobot + if new_dns_name: + fa_update_data["name"] = new_dns_name + if new_dns_name == "": + if description: + fa_update_data["name"] = description + elif inf_attrs.get("description"): + fa_update_data["name"] = inf_attrs.get("description") + if description: + fa_update_data["comment"] = description + + self.diffsync.job.logger.warning( + f"fa_update_data {fa_update_data}, fixed_address_type: {self.fixed_address_type}, mac_address {mac_address}, fixed_address_ref {self.fixed_address_ref}" + ) + if ( + self.diffsync.config.fixed_address_type == FixedAddressTypeChoices.RESERVED + and self.fixed_address_type == "RESERVED" + and fa_update_data + ): + self.diffsync.job.logger.warning(f"Updating RESERVED") + # if self.diffsync.config.create_ip_reservation and inf_attrs["has_fixed_address"] and description: + self.diffsync.conn.update_fixed_address(ref=self.fixed_address_ref, data=fa_update_data) + # Do not allow updating MAC address + elif ( + self.diffsync.config.fixed_address_type == FixedAddressTypeChoices.MAC_ADDRESS + and self.fixed_address_type == "MAC_ADDRESS" + and (fa_update_data or mac_address) + ): + if mac_address: + fa_update_data["mac"] = mac_address + self.diffsync.job.logger.warning(f"Updating MAC_ADDRESS, pyld: {fa_update_data}") + self.diffsync.conn.update_fixed_address(ref=self.fixed_address_ref, data=fa_update_data) + + # DNS record not needed, we can return + if self.diffsync.config.dns_record_type == DNSRecordTypeChoices.DONT_CREATE_RECORD: + # if not(self.diffsync.conn.create_a_record or self.diffsync.config.create_host_record): + return super().update(attrs) + + # # Changed attrs are not related to DNS record updates + # if not (dns_payload or ptr_payload): + # return super().update(attrs) # Nautobot side doesn't check if dns name is fqdn. Additionally, Infoblox won't allow dns name if the zone fqdn doesn't exist. # We get either existing DNS name, or a new one. This is because name might be the same but we need to create a PTR record. - dns_name = attrs.get("dns_name", inf_attrs["dns_name"]) - if not dns_name: + canonical_dns_name = attrs.get("dns_name", inf_attrs["dns_name"]) + if not canonical_dns_name: self.diffsync.job.logger.warning( f"Cannot update Infoblox record for IP Address {ip_address}. DNS name is not defined." ) return super().update(attrs) - if not validate_dns_name(self.diffsync.conn, dns_name, network_view): - self.diffsync.job.logger.warning(f"Invalid zone fqdn in DNS name `{dns_name}` for IP Address {ip_address}") + if not validate_dns_name(self.diffsync.conn, canonical_dns_name, network_view): + self.diffsync.job.logger.warning( + f"Invalid zone fqdn in DNS name `{canonical_dns_name}` for IP Address {ip_address}" + ) return super().update(attrs) # Infoblox Host record acts as a combined A/PTR record. # Only allow creating/updating A and PTR record if IP Address doesn't have a corresponding Host record. 
# Only allows creating/updating Host record if IP Address doesn't have a corresponding A or PTR record. incompatible_record_types = False - if attrs.get("has_a_record", False) and self.diffsync.config.create_a_record and inf_attrs["has_host_record"]: + # if attrs.get("has_a_record", False) and self.diffsync.config.create_a_record and inf_attrs["has_host_record"]: + if ( + attrs.get("has_a_record", False) + and self.diffsync.config.dns_record_type == DNSRecordTypeChoices.A_RECORD + and inf_attrs["has_host_record"] + ): incomp_msg = f"Cannot update A Record for IP Address, {ip_address}. It already has an existing Host Record." incompatible_record_types = True elif ( attrs.get("has_ptr_record", False) - and self.diffsync.config.create_ptr_record + # and self.diffsync.config.create_ptr_record + and self.diffsync.config.dns_record_type == DNSRecordTypeChoices.A_AND_PTR_RECORD and inf_attrs["has_host_record"] ): incomp_msg = ( @@ -153,14 +254,16 @@ def update(self, attrs): # pylint: disable=too-many-branches incompatible_record_types = True elif ( attrs.get("has_host_record", False) - and self.diffsync.config.create_host_record + # and self.diffsync.config.create_host_record + and self.diffsync.config.dns_record_type == DNSRecordTypeChoices.HOST_RECORD and inf_attrs["has_a_record"] ): incomp_msg = f"Cannot update Host Record for IP Address, {ip_address}. It already has an existing A Record." incompatible_record_types = True elif ( attrs.get("has_host_record", False) - and self.diffsync.config.create_host_record + # and self.diffsync.config.create_host_record + and self.diffsync.config.dns_record_type == DNSRecordTypeChoices.HOST_RECORD and inf_attrs["has_ptr_record"] ): incomp_msg = ( @@ -172,28 +275,53 @@ def update(self, attrs): # pylint: disable=too-many-branches self.diffsync.job.logger.warning(incomp_msg) return super().update(attrs) + dns_payload = {} + ptr_payload = {} + if description: + dns_payload.update({"comment": description}) + ptr_payload.update({"comment": description}) + if attrs.get("dns_name"): + dns_payload.update({"name": attrs.get("dns_name")}) + ptr_payload.update({"ptrdname": attrs.get("dns_name")}) + + self.diffsync.job.logger.warning( + f"config {self.diffsync.config.dns_record_type}, {self.diffsync.config.dns_record_type == DNSRecordTypeChoices.HOST_RECORD}" + ) a_record_action = "none" ptr_record_action = "none" host_record_action = "none" - if self.diffsync.config.create_a_record and inf_attrs["has_a_record"]: - a_record_action = "update" - if self.diffsync.config.create_ptr_record: + # if self.diffsync.config.create_a_record and inf_attrs["has_a_record"]: + if self.diffsync.config.dns_record_type == DNSRecordTypeChoices.A_RECORD: + a_record_action = "update" if inf_attrs["has_a_record"] else "create" + elif self.diffsync.config.dns_record_type == DNSRecordTypeChoices.A_AND_PTR_RECORD: + self.diffsync.job.logger.warning( + f"config {self.diffsync.config.dns_record_type}, {self.diffsync.config.dns_record_type == DNSRecordTypeChoices.A_AND_PTR_RECORD}" + ) + a_record_action = "update" if inf_attrs["has_a_record"] else "create" ptr_record_action = "update" if inf_attrs["has_ptr_record"] else "create" - if self.diffsync.config.create_host_record and inf_attrs["has_host_record"]: - host_record_action = "update" + # if self.diffsync.config.create_host_record and inf_attrs["has_host_record"]: + elif self.diffsync.config.dns_record_type == DNSRecordTypeChoices.HOST_RECORD: + host_record_action = "update" if inf_attrs["has_host_record"] else "create" # IP Address in Infoblox 
is not a plain IP Address like in Nautobot. # In Infoblox we can fixed_address (not supported here), Host record for IP Address, or A Record for IP Address. # When syncing from Nautobot to Infoblox we take IP Address and check if it has dns_name field populated. # We then combine this with the Infoblox Config toggles to arrive at the desired state in Infoblox. - if host_record_action == "update" and payload: - self.diffsync.conn.update_host_record(ref=self.host_record_ref, data=payload) - if a_record_action == "update" and payload: - self.diffsync.conn.update_a_record(ref=self.a_record_ref, data=payload) - if ptr_record_action == "update" and payload: - self.diffsync.conn.update_ptr_record(ref=self.ptr_record_ref, data=payload) + self.diffsync.job.logger.warning(f"host_record_action {host_record_action}, dns_payload {dns_payload}") + self.diffsync.job.logger.warning(f"ptr_record_action {ptr_record_action}, dns_payload {dns_payload}") + self.diffsync.job.logger.warning(f"inf_attrs {inf_attrs}") + if host_record_action == "update" and dns_payload: + self.diffsync.conn.update_host_record(ref=self.host_record_ref, data=dns_payload) + elif host_record_action == "create": + self.diffsync.conn.create_host_record(canonical_dns_name, ip_address, network_view=network_view) + if a_record_action == "update" and dns_payload: + self.diffsync.conn.update_a_record(ref=self.a_record_ref, data=dns_payload) + elif a_record_action == "create": + self.diffsync.conn.create_a_record(canonical_dns_name, ip_address, network_view=network_view) + if ptr_record_action == "update" and ptr_payload: + self.diffsync.conn.update_ptr_record(ref=self.ptr_record_ref, data=ptr_payload) elif ptr_record_action == "create": - self.diffsync.conn.create_ptr_record(dns_name, ip_address, network_view=network_view) + self.diffsync.conn.create_ptr_record(canonical_dns_name, ip_address, network_view=network_view) return super().update(attrs) # def delete(self): diff --git a/nautobot_ssot/integrations/infoblox/forms.py b/nautobot_ssot/integrations/infoblox/forms.py index e87510815..af0daeade 100644 --- a/nautobot_ssot/integrations/infoblox/forms.py +++ b/nautobot_ssot/integrations/infoblox/forms.py @@ -1,9 +1,12 @@ """Forms implementation for SSOT Infoblox.""" +from django import forms + from nautobot.extras.forms import NautobotModelForm, NautobotFilterForm -from nautobot.apps.forms import JSONField +from nautobot.apps.forms import JSONField, StaticSelect2 from .models import SSOTInfobloxConfig +from .choices import FixedAddressTypeChoices, DNSRecordTypeChoices class SSOTInfobloxConfigForm(NautobotModelForm): # pylint: disable=too-many-ancestors @@ -23,6 +26,16 @@ class SSOTInfobloxConfigForm(NautobotModelForm): # pylint: disable=too-many-anc help_text="Provide list of Extensible Attributes and Custom Fields to ignore during sync." 
" Assign lists to keys `extensible_attributes` and `custom_fields`.", ) + fixed_address_type = forms.ChoiceField( + choices=FixedAddressTypeChoices, + required=True, + widget=StaticSelect2(), + ) + dns_record_type = forms.ChoiceField( + choices=DNSRecordTypeChoices, + required=True, + widget=StaticSelect2(), + ) class Meta: """Meta attributes for the SSOTInfobloxConfigForm class.""" diff --git a/nautobot_ssot/integrations/infoblox/jobs.py b/nautobot_ssot/integrations/infoblox/jobs.py index 386b11f39..a32182bf8 100644 --- a/nautobot_ssot/integrations/infoblox/jobs.py +++ b/nautobot_ssot/integrations/infoblox/jobs.py @@ -170,6 +170,3 @@ def run(self, dryrun, memory_profiling, debug, *args, **kwargs): # pylint: disa jobs = [InfobloxDataSource, InfobloxDataTarget] - -# if PLUGIN_CFG["enable_sync_to_infoblox"]: -# jobs.append(InfobloxDataTarget) diff --git a/nautobot_ssot/integrations/infoblox/models.py b/nautobot_ssot/integrations/infoblox/models.py index d4b8ae75a..e13554243 100644 --- a/nautobot_ssot/integrations/infoblox/models.py +++ b/nautobot_ssot/integrations/infoblox/models.py @@ -7,10 +7,17 @@ from django.core.serializers.json import DjangoJSONEncoder from django.db import models +try: + from nautobot.apps.constants import CHARFIELD_MAX_LENGTH +except ImportError: + CHARFIELD_MAX_LENGTH = 255 + from nautobot.core.models.generics import PrimaryModel from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.models import SecretsGroupAssociation +from nautobot_ssot.integrations.infoblox.choices import FixedAddressTypeChoices, DNSRecordTypeChoices + def _get_default_sync_filters(): """Provides default value for SSOTInfobloxConfig infoblox_sync_filters field.""" @@ -77,15 +84,32 @@ class SSOTInfobloxConfig(PrimaryModel): # pylint: disable=too-many-ancestors default=False, verbose_name="Import IPv6", ) - create_host_record = models.BooleanField( + dns_record_type = models.CharField( + max_length=CHARFIELD_MAX_LENGTH, + default=DNSRecordTypeChoices.HOST_RECORD, + choices=DNSRecordTypeChoices, + help_text="Choose what type of Infoblox DNS record to create for IP Addresses.", + ) + fixed_address_type = models.CharField( + max_length=CHARFIELD_MAX_LENGTH, + default=FixedAddressTypeChoices.MAC_ADDRESS, + choices=FixedAddressTypeChoices, + help_text="Choose what type of Infoblox fixed IP address record to create.", + ) + create_ip_reservation = models.BooleanField( default=True, + verbose_name="Create IP Reservation", + help_text="Infoblox - Create IP Address as IP Reservation", + ) + create_host_record = models.BooleanField( + default=False, verbose_name="Create Host Record", - help_text="Infoblox - Create IP Address as Host Record", + help_text="Infoblox - Create IP Address Host Record", ) create_a_record = models.BooleanField( default=False, verbose_name="Create A Record", - help_text="Infoblox - Create IP Address as A Record", + help_text="Infoblox - Create IP Address A Record", ) create_ptr_record = models.BooleanField( default=False, diff --git a/nautobot_ssot/integrations/infoblox/signals.py b/nautobot_ssot/integrations/infoblox/signals.py index ea20dd33a..fc8c147e5 100644 --- a/nautobot_ssot/integrations/infoblox/signals.py +++ b/nautobot_ssot/integrations/infoblox/signals.py @@ -168,6 +168,7 @@ def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disa infoblox_sync_filters=infoblox_sync_filters, infoblox_dns_view_mapping={}, cf_fields_ignore={}, + create_ip_reservation=False, create_a_record=False, 
create_host_record=True, create_ptr_record=False, diff --git a/nautobot_ssot/integrations/infoblox/utils/client.py b/nautobot_ssot/integrations/infoblox/utils/client.py index f66e0c18b..9614093e9 100644 --- a/nautobot_ssot/integrations/infoblox/utils/client.py +++ b/nautobot_ssot/integrations/infoblox/utils/client.py @@ -2,6 +2,7 @@ from __future__ import annotations +from functools import lru_cache import ipaddress import json import logging @@ -922,7 +923,7 @@ def get_all_dns_views(self): logger.error(response.text) return response.text - def create_a_record(self, fqdn, ip_address, network_view: Optional[str] = None): + def create_a_record(self, fqdn, ip_address, comment: Optional[str] = None, network_view: Optional[str] = None): """Create an A record for a given FQDN. Please note: This API call with work only for host records that do not have an associated a record. @@ -946,6 +947,8 @@ def create_a_record(self, fqdn, ip_address, network_view: Optional[str] = None): if network_view: dns_view = self.get_dns_view_for_network_view(network_view) payload["view"] = dns_view + if comment: + payload["comment"] = comment response = self._request("POST", url_path, params=params, json=payload) try: logger.debug(response.json()) @@ -1203,6 +1206,7 @@ def get_authoritative_zone(self, network_view: Optional[str] = None): logger.error(response.text) return response.text + @lru_cache(maxsize=None) def get_authoritative_zones_for_dns_view(self, view: str): """Get authoritative zone list for given DNS view. @@ -1339,11 +1343,20 @@ def reserve_fixed_address(self, network, mac_address, network_view: Optional[str return response.text return False - def create_fixed_address(self, ip_address, mac_address, network_view: Optional[str] = None): + def create_fixed_address( + self, + ip_address, + name: str = None, + mac_address: Optional[str] = None, + comment: Optional[str] = None, + match_client: str = "MAC_ADDRESS", + network_view: Optional[str] = None, + ): """Create a fixed ip address within Infoblox. Args: network_view (str): Name of the network view, e.g. 'dev' + match_client: match client value, valid values are: "MAC_ADDRESS", "RESERVED" Returns: Str: The IP Address that was reserved @@ -1353,9 +1366,18 @@ def create_fixed_address(self, ip_address, mac_address, network_view: Optional[s """ url_path = "fixedaddress" params = {"_return_fields": "ipv4addr", "_return_as_object": 1} - payload = {"ipv4addr": ip_address, "mac": mac_address} + valid_match_client_choices = ["MAC_ADDRESS", "RESERVED"] + if match_client not in valid_match_client_choices: + return None + payload = {"ipv4addr": ip_address, "match_client": match_client} + if match_client == "MAC_ADDRESS" and mac_address: + payload["mac"] = mac_address if network_view: payload["network_view"] = network_view + if name: + payload["name"] = name + if comment: + payload["comment"] = comment response = self._request("POST", url_path, params=params, json=payload) try: logger.debug(response.json()) @@ -1365,7 +1387,37 @@ def create_fixed_address(self, ip_address, mac_address, network_view: Optional[s logger.error(response.text) return response.text - def create_host_record(self, fqdn, ip_address, network_view: Optional[str] = None): + def update_fixed_address(self, ref, data): + """Update a fixed ip address within Infoblox. 
+
+        Args:
+            ref (str): Reference to fixed address record
+            data (dict): Keyed pairs of the fixed address fields to update
+
+        Returns:
+            Str: Reference of the updated fixed address record
+        """
+        params = {}
+        try:
+            response = self._request("PUT", path=ref, params=params, json=data)
+        except HTTPError as err:
+            logger.error("Could not update fixed address: %s for ref %s", err.response.text, ref)
+            return None
+        try:
+            logger.debug("Infoblox fixed address updated: %s", response.json())
+            results = response.json()
+            return results
+        except json.decoder.JSONDecodeError:
+            logger.error(response.text)
+            return response.text
+
+    def create_host_record(self, fqdn, ip_address, comment: Optional[str] = None, network_view: Optional[str] = None):
         """Create a host record for a given FQDN.
 
         Please note: This API call with work only for host records that do not have an associated a record.
@@ -1389,6 +1441,8 @@ def create_host_record(self, fqdn, ip_address, network_view: Optional[str] = Non
         payload = {"name": fqdn, "configure_for_dns": False, "ipv4addrs": [{"ipv4addr": ip_address}]}
         if network_view:
             payload["network_view"] = network_view
+        if comment:
+            payload["comment"] = comment
         try:
             response = self._request("POST", url_path, params=params, json=payload)
         except HTTPError as err:
@@ -1450,7 +1504,7 @@ def delete_host_record(self, ip_address, network_view: Optional[str] = None):
         logger.debug(response)
         return response
 
-    def create_ptr_record(self, fqdn, ip_address, network_view: Optional[str] = None):
+    def create_ptr_record(self, fqdn, ip_address, comment: Optional[str] = None, network_view: Optional[str] = None):
         """Create a PTR record for a given FQDN.
 
         Args:
@@ -1478,6 +1532,8 @@ def create_ptr_record(self, fqdn, ip_address, network_view: Optional[str] = None
         if network_view:
             dns_view = self.get_dns_view_for_network_view(network_view)
             payload["view"] = dns_view
+        if comment:
+            payload["comment"] = comment
         response = self._request("POST", url_path, params=params, json=payload)
         try:
             logger.debug("Infoblox PTR record created: %s", response.json())
@@ -2167,6 +2223,7 @@ def get_dns_view_for_network_view(self, network_view: str):
 
         return dns_view
 
+    @lru_cache(maxsize=None)
     def get_default_dns_view_for_network_view(self, network_view: str):
         """Get default (first on the list) DNS view for given network view. 
diff --git a/nautobot_ssot/integrations/infoblox/utils/diffsync.py b/nautobot_ssot/integrations/infoblox/utils/diffsync.py index 87a1b402c..fb3504cd2 100644 --- a/nautobot_ssot/integrations/infoblox/utils/diffsync.py +++ b/nautobot_ssot/integrations/infoblox/utils/diffsync.py @@ -94,7 +94,7 @@ def get_valid_custom_fields(cfs: dict, excluded_cfs: list): cfs: custom fields excluded_cfs: list of excluded custom fields """ - default_excluded_cfs = ["ssot_synced_to_infoblox", "dhcp_ranges"] + default_excluded_cfs = ["ssot_synced_to_infoblox", "dhcp_ranges", "mac_address"] excluded_cfs.extend(default_excluded_cfs) valid_cfs = {} for cf_name, val in cfs.items(): diff --git a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_changelog.html b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_changelog.html new file mode 100644 index 000000000..76441438c --- /dev/null +++ b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_changelog.html @@ -0,0 +1 @@ +{% extends 'generic/object_changelog.html' %} diff --git a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html index 3e19007e0..b3d425583 100644 --- a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html +++ b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html @@ -69,11 +69,23 @@
                     <td>{{ object.import_ipv6 }}</td>
                 </tr>
                 <tr>
-                    <td>Infoblox - Create IP Address as Host Record</td>
+                    <td>Infoblox - Fixed IP Address Type</td>
+                    <td>{{ object.fixed_address_type }}</td>
+                </tr>
+                <tr>
+                    <td>Infoblox - DNS record type</td>
+                    <td>{{ object.dns_record_type }}</td>
+                </tr>
+                <tr>
+                    <td>Infoblox - Create IP Address as IP Reservation</td>
+                    <td>{{ object.create_ip_reservation }}</td>
+                </tr>
+                <tr>
+                    <td>Infoblox - Create Host Record for IP Address</td>
                     <td>{{ object.create_host_record }}</td>
                 </tr>
                 <tr>
-                    <td>Infoblox - Create IP Address as A Record</td>
+                    <td>Infoblox - Create A Record for IP Address</td>
                     <td>{{ object.create_a_record }}</td>
                 </tr>
diff --git a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_update.html b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_update.html
index afd16457f..f77dbe608 100644
--- a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_update.html
+++ b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_update.html
@@ -16,6 +16,9 @@
                 {% render_field form.import_vlans %}
                 {% render_field form.import_ipv4 %}
                 {% render_field form.import_ipv6 %}
+                {% render_field form.fixed_address_type %}
+                {% render_field form.dns_record_type %}
+                {% render_field form.create_ip_reservation %}
                 {% render_field form.create_host_record %}
                 {% render_field form.create_a_record %}
                 {% render_field form.create_ptr_record %}
diff --git a/nautobot_ssot/tests/infoblox/test_infoblox_models.py b/nautobot_ssot/tests/infoblox/test_infoblox_models.py
index 910d1952c..b74fc271a 100644
--- a/nautobot_ssot/tests/infoblox/test_infoblox_models.py
+++ b/nautobot_ssot/tests/infoblox/test_infoblox_models.py
@@ -5,6 +5,7 @@
 
 from django.test import TestCase
 
+from nautobot_ssot.integrations.infoblox.choices import DNSRecordTypeChoices, FixedAddressTypeChoices
 from nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox import InfobloxAdapter
 from nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot import NautobotAdapter
 
@@ -54,7 +55,9 @@ def test_ip_address_create_a_record(self, mock_tag_involved_objects, mock_valida
         with unittest.mock.patch(
             "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True
         ) as mock_client:
-            self.config.create_a_record = True
+            # self.config.create_a_record = True
+            self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD
+            self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD
             infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config)
             inf_ds_namespace = infoblox_adapter.namespace(
                 name="Global",
@@ -93,9 +96,11 @@ def test_ip_address_create_a_and_ptr_record(self, mock_tag_involved_objects, moc
         with unittest.mock.patch(
             "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True
         ) as mock_client:
-            self.config.create_host_record = False
-            self.config.create_a_record = True
-            self.config.create_ptr_record = True
+            # self.config.create_host_record = False
+            # self.config.create_a_record = True
+            # self.config.create_ptr_record = True
+            self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD
+            self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD
             infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config)
             inf_ds_namespace = infoblox_adapter.namespace(
                 name="Global",
@@ -139,9 +144,11 @@ def test_ip_address_create_host_record(self, mock_tag_involved_objects, mock_val
         with unittest.mock.patch(
             "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True
         ) as mock_client:
-            self.config.create_host_record = True
-            self.config.create_a_record = False
-            self.config.create_ptr_record = False
+            # 
self.config.create_host_record = True + # self.config.create_a_record = False + # self.config.create_ptr_record = False + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) inf_ds_namespace = infoblox_adapter.namespace( name="Global", @@ -182,9 +189,11 @@ def test_ip_address_create_no_dns_name(self, mock_tag_involved_objects, mock_val with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.create_host_record = False - self.config.create_a_record = True - self.config.create_ptr_record = False + # self.config.create_host_record = False + # self.config.create_a_record = True + # self.config.create_ptr_record = False + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) inf_ds_namespace = infoblox_adapter.namespace( name="Global", @@ -195,7 +204,7 @@ def test_ip_address_create_no_dns_name(self, mock_tag_involved_objects, mock_val job_logger = Mock() infoblox_adapter.job.logger = job_logger self.nb_adapter.sync_to(infoblox_adapter) - log_msg = "Cannot create Infoblox record for IP Address 10.0.0.1. DNS name is not defined." + log_msg = "Cannot create Infoblox DNS record for IP Address 10.0.0.1. DNS name is not defined." job_logger.warning.assert_called_with(log_msg) mock_tag_involved_objects.assert_called_once() @@ -230,9 +239,11 @@ def test_ip_address_update_host_record(self, mock_tag_involved_objects, mock_val with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.create_a_record = False - self.config.create_ptr_record = False - self.config.create_host_record = True + # self.config.create_a_record = False + # self.config.create_ptr_record = False + # self.config.create_host_record = True + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( @@ -280,9 +291,11 @@ def test_ip_address_update_a_record(self, mock_tag_involved_objects, mock_valida with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.create_host_record = False - self.config.create_a_record = True - self.config.create_ptr_record = False + # self.config.create_host_record = False + # self.config.create_a_record = True + # self.config.create_ptr_record = False + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( @@ -330,9 +343,11 @@ def test_ip_address_create_ptr_record(self, mock_tag_involved_objects, mock_vali with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.create_host_record = False - self.config.create_a_record = True - self.config.create_ptr_record = True + # self.config.create_host_record = False + # 
self.config.create_a_record = True + # self.config.create_ptr_record = True + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( @@ -346,7 +361,9 @@ def test_ip_address_create_ptr_record(self, mock_tag_involved_objects, mock_vali "has_ptr_record": False, "a_record_ref": "record:a/xyz", } + print(_get_ip_address_dict(inf_ipaddress_atrs)) inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + print(infoblox_adapter.dict()) infoblox_adapter.add(inf_ds_ipaddress) self.nb_adapter.sync_to(infoblox_adapter) @@ -381,9 +398,11 @@ def test_ip_address_update_a_and_ptr_record(self, mock_tag_involved_objects, moc with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.create_host_record = False - self.config.create_a_record = True - self.config.create_ptr_record = True + # self.config.create_host_record = False + # self.config.create_a_record = True + # self.config.create_ptr_record = True + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( @@ -403,7 +422,7 @@ def test_ip_address_update_a_and_ptr_record(self, mock_tag_involved_objects, moc self.nb_adapter.sync_to(infoblox_adapter) infoblox_adapter.conn.update_ptr_record.assert_called_once() infoblox_adapter.conn.update_ptr_record.assert_called_with( - ref="record:ptr/xyz", data={"name": "server2.local.test.net"} + ref="record:ptr/xyz", data={"ptrdname": "server2.local.test.net"} ) infoblox_adapter.conn.update_a_record.assert_called_once() infoblox_adapter.conn.update_a_record.assert_called_with( @@ -436,9 +455,11 @@ def test_ip_address_update_fail_host_and_a_record(self, mock_tag_involved_object with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.create_host_record = False - self.config.create_a_record = True - self.config.create_ptr_record = False + # self.config.create_host_record = False + # self.config.create_a_record = True + # self.config.create_ptr_record = False + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) infoblox_adapter.job = Mock() job_logger = Mock() @@ -488,9 +509,11 @@ def test_ip_address_update_fail_ptr_and_host_record(self, mock_tag_involved_obje with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.create_host_record = False - self.config.create_a_record = True - self.config.create_ptr_record = True + # self.config.create_host_record = False + # self.config.create_a_record = True + # self.config.create_ptr_record = True + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) infoblox_adapter.job = Mock() job_logger = Mock() From 
1f0b546557084fc54b296e1911e04d3a2342e99b Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Fri, 24 May 2024 20:15:46 +0100 Subject: [PATCH 088/229] Add more tests for Infoblox models. --- .../tests/infoblox/test_infoblox_models.py | 500 ++++++++++++++++++ 1 file changed, 500 insertions(+) diff --git a/nautobot_ssot/tests/infoblox/test_infoblox_models.py b/nautobot_ssot/tests/infoblox/test_infoblox_models.py index b74fc271a..7b25462ff 100644 --- a/nautobot_ssot/tests/infoblox/test_infoblox_models.py +++ b/nautobot_ssot/tests/infoblox/test_infoblox_models.py @@ -37,6 +37,42 @@ def setUp(self): self.nb_adapter = NautobotAdapter(config=self.config) self.nb_adapter.job = Mock() + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_create_nothing_get_created(self, mock_tag_involved_objects, mock_validate_dns_name): + """Validate nothing gets created if user selects DONT_CREATE_RECORD for DNS and Fixed Address options.""" + nb_ipaddress_atrs = {"dns_name": "server1.local.test.net", "mac_address": "52:1f:83:d4:9a:2e"} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + # self.config.create_a_record = True + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + infoblox_adapter.job = Mock() + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.create_fixed_address.assert_not_called() + infoblox_adapter.conn.create_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_not_called() + @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", autospec=True, @@ -210,6 +246,470 @@ def test_ip_address_create_no_dns_name(self, mock_tag_involved_objects, mock_val mock_tag_involved_objects.assert_called_once() mock_validate_dns_name.assert_not_called() + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_create_fixed_address_reserved(self, mock_tag_involved_objects, mock_validate_dns_name): + """Validate Fixed Address type RESERVED is created.""" + nb_ipaddress_atrs = {"dns_name": "server1.local.test.net"} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + # self.config.create_a_record = True + 
self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + infoblox_adapter.job = Mock() + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.create_fixed_address.assert_called_once() + infoblox_adapter.conn.create_fixed_address.assert_called_with( + ip_address="10.0.0.1", name="server1.local.test.net", match_client="RESERVED", network_view="default" + ) + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.create_host_record.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_not_called() + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_create_fixed_address_reserved_no_dns_name( + self, mock_tag_involved_objects, mock_validate_dns_name + ): + """Validate Fixed Address type RESERVED is created with description used for name.""" + nb_ipaddress_atrs = {"dns_name": "", "description": "server1"} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + # self.config.create_a_record = True + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + infoblox_adapter.job = Mock() + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.create_fixed_address.assert_called_once() + infoblox_adapter.conn.create_fixed_address.assert_called_with( + ip_address="10.0.0.1", name="server1", match_client="RESERVED", network_view="default" + ) + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.create_host_record.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_not_called() + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_create_fixed_address_mac(self, mock_tag_involved_objects, mock_validate_dns_name): + """Validate Fixed Address type MAC_ADDRESS is created.""" + nb_ipaddress_atrs = {"dns_name": "server1.local.test.net", "mac_address": "52:1f:83:d4:9a:2e"} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + with unittest.mock.patch( + 
"nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + # self.config.create_a_record = True + self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS + self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + infoblox_adapter.job = Mock() + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.create_fixed_address.assert_called_once() + infoblox_adapter.conn.create_fixed_address.assert_called_with( + ip_address="10.0.0.1", + name="server1.local.test.net", + mac_address="52:1f:83:d4:9a:2e", + match_client="MAC_ADDRESS", + network_view="default", + ) + infoblox_adapter.conn.create_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_not_called() + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_create_fixed_address_mac_no_dns_name(self, mock_tag_involved_objects, mock_validate_dns_name): + """Validate Fixed Address type MAC_ADDRESS is created with description used for name.""" + nb_ipaddress_atrs = {"dns_name": "", "description": "server1", "mac_address": "52:1f:83:d4:9a:2e"} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + # self.config.create_a_record = True + self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS + self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + infoblox_adapter.job = Mock() + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.create_fixed_address.assert_called_once() + infoblox_adapter.conn.create_fixed_address.assert_called_with( + ip_address="10.0.0.1", + name="server1", + mac_address="52:1f:83:d4:9a:2e", + match_client="MAC_ADDRESS", + network_view="default", + ) + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.create_host_record.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_not_called() + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_create_fixed_address_reserved_with_host_record( + self, mock_tag_involved_objects, mock_validate_dns_name + ): + """Validate Fixed Address type RESERVED is created with DNS Host record.""" + nb_ipaddress_atrs = 
{"dns_name": "server1.local.test.net"} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + # self.config.create_a_record = True + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + infoblox_adapter.job = Mock() + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.create_fixed_address.assert_called_once() + infoblox_adapter.conn.create_fixed_address.assert_called_with( + ip_address="10.0.0.1", name="server1.local.test.net", match_client="RESERVED", network_view="default" + ) + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.create_host_record.assert_called_once() + infoblox_adapter.conn.create_host_record.assert_called_with( + fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + ) + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_create_fixed_address_reserved_with_a_record( + self, mock_tag_involved_objects, mock_validate_dns_name + ): + """Validate Fixed Address type RESERVED is created with DNS A record.""" + nb_ipaddress_atrs = {"dns_name": "server1.local.test.net"} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + # self.config.create_a_record = True + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + infoblox_adapter.job = Mock() + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.create_fixed_address.assert_called_once() + infoblox_adapter.conn.create_fixed_address.assert_called_with( + ip_address="10.0.0.1", name="server1.local.test.net", match_client="RESERVED", network_view="default" + ) + infoblox_adapter.conn.create_host_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_called_once() + infoblox_adapter.conn.create_a_record.assert_called_with( + fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + ) + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + 
mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_create_fixed_address_reserved_with_a_and_ptr_record( + self, mock_tag_involved_objects, mock_validate_dns_name + ): + """Validate Fixed Address type RESERVED is created with DNS A and PTR records.""" + nb_ipaddress_atrs = {"dns_name": "server1.local.test.net"} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + # self.config.create_a_record = True + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + infoblox_adapter.job = Mock() + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.create_fixed_address.assert_called_once() + infoblox_adapter.conn.create_fixed_address.assert_called_with( + ip_address="10.0.0.1", name="server1.local.test.net", match_client="RESERVED", network_view="default" + ) + infoblox_adapter.conn.create_host_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_called_once() + infoblox_adapter.conn.create_ptr_record.assert_called_with( + fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + ) + infoblox_adapter.conn.create_a_record.assert_called_once() + infoblox_adapter.conn.create_a_record.assert_called_with( + fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + ) + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_create_fixed_address_mac_with_host_record( + self, mock_tag_involved_objects, mock_validate_dns_name + ): + """Validate Fixed Address type MAC_ADDRESS is created with DNS Host record.""" + nb_ipaddress_atrs = {"dns_name": "server1.local.test.net", "mac_address": "52:1f:83:d4:9a:2e"} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + # self.config.create_a_record = True + self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS + self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + 
inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + infoblox_adapter.job = Mock() + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.create_fixed_address.assert_called_once() + infoblox_adapter.conn.create_fixed_address.assert_called_with( + ip_address="10.0.0.1", + name="server1.local.test.net", + mac_address="52:1f:83:d4:9a:2e", + match_client="MAC_ADDRESS", + network_view="default", + ) + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.create_host_record.assert_called_once() + infoblox_adapter.conn.create_host_record.assert_called_with( + fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + ) + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_create_fixed_address_mac_with_a_record(self, mock_tag_involved_objects, mock_validate_dns_name): + """Validate Fixed Address type MAC_ADDRESS is created with DNS A record.""" + nb_ipaddress_atrs = {"dns_name": "server1.local.test.net", "mac_address": "52:1f:83:d4:9a:2e"} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + # self.config.create_a_record = True + self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS + self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + infoblox_adapter.job = Mock() + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.create_fixed_address.assert_called_once() + infoblox_adapter.conn.create_fixed_address.assert_called_with( + ip_address="10.0.0.1", + name="server1.local.test.net", + mac_address="52:1f:83:d4:9a:2e", + match_client="MAC_ADDRESS", + network_view="default", + ) + infoblox_adapter.conn.create_host_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_called_once() + infoblox_adapter.conn.create_a_record.assert_called_with( + fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + ) + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def 
test_ip_address_create_fixed_address_mac_with_a_and_ptr_record( + self, mock_tag_involved_objects, mock_validate_dns_name + ): + """Validate Fixed Address type MAC_ADDRESS is created with DNS A and PTR records.""" + nb_ipaddress_atrs = {"dns_name": "server1.local.test.net", "mac_address": "52:1f:83:d4:9a:2e"} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + # self.config.create_a_record = True + self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS + self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + infoblox_adapter.job = Mock() + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.create_fixed_address.assert_called_once() + infoblox_adapter.conn.create_fixed_address.assert_called_with( + ip_address="10.0.0.1", + name="server1.local.test.net", + mac_address="52:1f:83:d4:9a:2e", + match_client="MAC_ADDRESS", + network_view="default", + ) + infoblox_adapter.conn.create_host_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_called_once() + infoblox_adapter.conn.create_ptr_record.assert_called_with( + fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + ) + infoblox_adapter.conn.create_a_record.assert_called_once() + infoblox_adapter.conn.create_a_record.assert_called_with( + fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + ) + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" + ) + class TestModelInfobloxIPAddressUpdate(TestCase): """Tests validating IP Address Update scenarios.""" From 1212518d0868ad2a6f4d456d1e6011a41b776f25 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Fri, 24 May 2024 20:35:34 +0100 Subject: [PATCH 089/229] Start adding tests for fixed address updates. 
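
The update tests below all follow one skeleton: load a Nautobot IP address carrying the
changed attributes, load a matching Infoblox IP address carrying the existing record refs,
run sync_to(), then assert against the mocked connection. A minimal sketch of that skeleton,
assuming this test module's _get_ip_address_dict helper and the config/adapter fixtures from
setUp(); the helper name _sync_one_ipaddress and the choice values shown are illustrative,
not part of the change:

    def _sync_one_ipaddress(self, nb_attrs, inf_attrs):
        """Hypothetical helper: sync one changed IP and return the Infoblox adapter."""
        self.nb_adapter.add(self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_attrs)))
        self.nb_adapter.load()
        with unittest.mock.patch(
            "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True
        ) as mock_client:
            self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED
            self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD
            infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config)
            infoblox_adapter.job = Mock()
            infoblox_adapter.add(infoblox_adapter.namespace(name="Global", ext_attrs={}))
            infoblox_adapter.add(infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_attrs)))
            self.nb_adapter.sync_to(infoblox_adapter)
            return infoblox_adapter

Each test then asserts on the mock, for example:

    # adapter = self._sync_one_ipaddress(nb_attrs, inf_attrs)
    # adapter.conn.update_fixed_address.assert_called_once()
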
--- .../tests/infoblox/test_infoblox_models.py | 137 ++++++++++++++---- 1 file changed, 106 insertions(+), 31 deletions(-) diff --git a/nautobot_ssot/tests/infoblox/test_infoblox_models.py b/nautobot_ssot/tests/infoblox/test_infoblox_models.py index 7b25462ff..e71fd56fe 100644 --- a/nautobot_ssot/tests/infoblox/test_infoblox_models.py +++ b/nautobot_ssot/tests/infoblox/test_infoblox_models.py @@ -132,9 +132,6 @@ def test_ip_address_create_a_and_ptr_record(self, mock_tag_involved_objects, moc with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - # self.config.create_host_record = False - # self.config.create_a_record = True - # self.config.create_ptr_record = True self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) @@ -180,9 +177,6 @@ def test_ip_address_create_host_record(self, mock_tag_involved_objects, mock_val with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - # self.config.create_host_record = True - # self.config.create_a_record = False - # self.config.create_ptr_record = False self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) @@ -225,9 +219,6 @@ def test_ip_address_create_no_dns_name(self, mock_tag_involved_objects, mock_val with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - # self.config.create_host_record = False - # self.config.create_a_record = True - # self.config.create_ptr_record = False self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) @@ -264,7 +255,6 @@ def test_ip_address_create_fixed_address_reserved(self, mock_tag_involved_object with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - # self.config.create_a_record = True self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) @@ -576,7 +566,6 @@ def test_ip_address_create_fixed_address_mac_with_host_record( with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - # self.config.create_a_record = True self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) @@ -625,7 +614,6 @@ def test_ip_address_create_fixed_address_mac_with_a_record(self, mock_tag_involv with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - # self.config.create_a_record = True self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) @@ -676,7 +664,6 @@ def test_ip_address_create_fixed_address_mac_with_a_and_ptr_record( with 
unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - # self.config.create_a_record = True self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) @@ -720,6 +707,112 @@ def setUp(self): self.nb_adapter = NautobotAdapter(config=self.config) self.nb_adapter.job = Mock() + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_update_fixed_address_type_reserved_name_and_comment( + self, mock_tag_involved_objects, mock_validate_dns_name + ): + """Ensure Fixed Address type RESERVED is updated.""" + nb_ipaddress_atrs = { + "dns_name": "server2.local.test.net", + "has_fixed_address": True, + "description": "new description", + } + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "has_fixed_address": True, + "fixed_address_ref": "fixedaddress/xyz", + "fixed_address_type": "RESERVED", + "description": "old description", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.update_fixed_address.assert_called_once() + infoblox_adapter.conn.update_fixed_address.assert_called_with( + ref="fixedaddress/xyz", data={"name": "server2.local.test.net", "comment": "new description"} + ) + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_not_called() + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_update_fixed_address_type_reserved_description_used_for_name( + self, mock_tag_involved_objects, mock_validate_dns_name + ): + """Ensure Fixed Address type RESERVED is updated. 
With no DNS name description is used for name and comment.""" + nb_ipaddress_atrs = {"dns_name": "", "has_fixed_address": True, "description": "new description"} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "has_fixed_address": True, + "fixed_address_ref": "fixedaddress/xyz", + "fixed_address_type": "RESERVED", + "description": "old description", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.update_fixed_address.assert_called_once() + infoblox_adapter.conn.update_fixed_address.assert_called_with( + ref="fixedaddress/xyz", data={"name": "new description", "comment": "new description"} + ) + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_not_called() + @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", autospec=True, @@ -739,9 +832,6 @@ def test_ip_address_update_host_record(self, mock_tag_involved_objects, mock_val with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - # self.config.create_a_record = False - # self.config.create_ptr_record = False - # self.config.create_host_record = True self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) @@ -791,9 +881,6 @@ def test_ip_address_update_a_record(self, mock_tag_involved_objects, mock_valida with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - # self.config.create_host_record = False - # self.config.create_a_record = True - # self.config.create_ptr_record = False self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) @@ -843,9 +930,6 @@ def test_ip_address_create_ptr_record(self, mock_tag_involved_objects, mock_vali with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - # self.config.create_host_record = False - # self.config.create_a_record = True - # self.config.create_ptr_record = True self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, 
config=self.config) @@ -898,9 +982,6 @@ def test_ip_address_update_a_and_ptr_record(self, mock_tag_involved_objects, moc with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - # self.config.create_host_record = False - # self.config.create_a_record = True - # self.config.create_ptr_record = True self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) @@ -955,9 +1036,6 @@ def test_ip_address_update_fail_host_and_a_record(self, mock_tag_involved_object with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - # self.config.create_host_record = False - # self.config.create_a_record = True - # self.config.create_ptr_record = False self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) @@ -1009,9 +1087,6 @@ def test_ip_address_update_fail_ptr_and_host_record(self, mock_tag_involved_obje with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - # self.config.create_host_record = False - # self.config.create_a_record = True - # self.config.create_ptr_record = True self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) From 0f2ddab982e07a104104029f601999ce6e636f5a Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Thu, 30 May 2024 10:22:42 -0500 Subject: [PATCH 090/229] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Correct=20CVP=20i?= =?UTF-8?q?ntegration=20call=20to=20get=5Fdevices()=20to=20pass=20import?= =?UTF-8?q?=5Factive=20config=20setting.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- changes/463.fixed | 1 + .../integrations/aristacv/diffsync/adapters/cloudvision.py | 4 +++- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 changes/463.fixed diff --git a/changes/463.fixed b/changes/463.fixed new file mode 100644 index 000000000..754e36d02 --- /dev/null +++ b/changes/463.fixed @@ -0,0 +1 @@ +Fixed call in CVP integration to pass `import_active` config setting to get_devices() function call. \ No newline at end of file diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py index a79aea7ab..a9c1f447e 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py @@ -69,7 +69,9 @@ def load_devices(self): except ObjectAlreadyExists as err: self.job.logger.warning(f"Error attempting to add CloudVision device. 
{err}") - for index, dev in enumerate(cloudvision.get_devices(client=self.conn.comm_channel), start=1): + for index, dev in enumerate( + cloudvision.get_devices(client=self.conn.comm_channel, import_active=config.import_active), start=1 + ): if self.job.debug: self.job.logger.info(f"Loading {index}° device") if dev["hostname"] != "": From 7acc97a41552627946aa3671c54ba8a756ad11af Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Wed, 5 Jun 2024 14:21:07 +0100 Subject: [PATCH 091/229] Logic fixes. Typing and docs updates. --- .../infoblox/diffsync/adapters/infoblox.py | 27 ++++++++++++------- 1 file changed, 17 insertions(+), 10 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py index d6a4ddefd..a02518b59 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py @@ -1,10 +1,8 @@ """Infoblox Adapter for Infoblox integration with SSoT app.""" import re -from typing import Optional import requests - from diffsync import DiffSync from diffsync.enum import DiffSyncFlags from diffsync.exceptions import ObjectAlreadyExists @@ -64,11 +62,11 @@ def __init__(self, *args, job=None, sync=None, conn, config, **kwargs): ) raise PluginImproperlyConfigured - def load_network_views(self, sync_filters: dict): + def load_network_views(self, sync_filters: list): """Load Namespace DiffSync model. Args: - sync_filter (dict): Sync filter containing sync rules + sync_filters (list): Sync filters containing sync rules """ if self.job.debug: self.job.logger.debug("Loading Network Views from Infoblox.") @@ -160,13 +158,19 @@ def _load_all_prefixes_filtered(self, sync_filters: list, include_ipv4: bool, in return all_containers, all_subnets - def load_prefixes(self, include_ipv4: bool, include_ipv6: bool, sync_filters: Optional[list] = None): - """Load InfobloxNetwork DiffSync model.""" + def load_prefixes(self, include_ipv4: bool, include_ipv6: bool, sync_filters: list): + """Load InfobloxNetwork DiffSync model. 
+ + Args: + sync_filters (list): List of dicts, each dict is a single sync filter definition + include_ipv4 (bool): Whether to include IPv4 prefixes + include_ipv6 (bool): Whether to include IPv6 prefixes + """ if self.job.debug: self.job.logger.debug("Loading Subnets from Infoblox.") try: containers, subnets = self._load_all_prefixes_filtered( - sync_filters, include_ipv4=include_ipv4, include_ipv6=include_ipv6 + sync_filters=sync_filters, include_ipv4=include_ipv4, include_ipv6=include_ipv6 ) except requests.exceptions.HTTPError as err: self.job.logger.error(f"Error while loading prefixes: {str(err)}") @@ -193,7 +197,7 @@ def load_prefixes(self, include_ipv4: bool, include_ipv6: bool, sync_filters: Op except ObjectAlreadyExists: self.job.logger.warning(f"Duplicate prefix found: {new_pf}.") - def load_ipaddresses(self): + def load_ipaddresses(self): # pylint: disable=too-many-branches """Load InfobloxIPAddress DiffSync model.""" if self.job.debug: self.job.logger.debug("Loading IP addresses from Infoblox.") @@ -247,13 +251,16 @@ def load_ipaddresses(self): elif obj_type == "record:ptr": new_ip.has_ptr_record = True new_ip.ptr_record_ref = ref + # We currently only support RESERVED and MAC_ADDRESS types for fixed address elif obj_type == "fixedaddress": - new_ip.has_fixed_address = True - new_ip.fixed_address_ref = ref if "RESERVATION" in _ip["types"]: new_ip.fixed_address_type = "RESERVED" + new_ip.has_fixed_address = True + new_ip.fixed_address_ref = ref elif "FA" in _ip["types"]: new_ip.fixed_address_type = "MAC_ADDRESS" + new_ip.has_fixed_address = True + new_ip.fixed_address_ref = ref self.add(new_ip) From 18cacbc90144de25e3649797dd0b068b02b1ec6f Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Wed, 5 Jun 2024 14:21:51 +0100 Subject: [PATCH 092/229] Logic fixes. Typing and docs updates. --- .../infoblox/diffsync/adapters/nautobot.py | 62 +++++++++++++------ 1 file changed, 42 insertions(+), 20 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py index 857a8c426..9fe47181d 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py @@ -10,8 +10,8 @@ from nautobot.dcim.models import Location from nautobot.extras.choices import CustomFieldTypeChoices from nautobot.extras.models import CustomField, Relationship, Role, Status, Tag -from nautobot.ipam.models import VLAN, IPAddress, Namespace, Prefix, VLANGroup from nautobot.ipam.choices import IPAddressTypeChoices +from nautobot.ipam.models import VLAN, IPAddress, Namespace, Prefix, VLANGroup from nautobot.tenancy.models import Tenant from nautobot_ssot.integrations.infoblox.choices import DNSRecordTypeChoices, FixedAddressTypeChoices @@ -140,7 +140,11 @@ def sync_complete(self, source: DiffSync, *args, **kwargs): super().sync_complete(source, *args, **kwargs) def _get_namespaces_from_sync_filters(self, sync_filters: list) -> set: - """Get namespaces defined in filters.""" + """Get namespaces defined in filters. 
+ + Args: + sync_filters (list): Sync filters containing sync rules + """ namespaces = set() for sync_filter in sync_filters: namespace_name = map_network_view_to_namespace(value=sync_filter["network_view"], direction="nv_to_ns") @@ -149,7 +153,13 @@ def _get_namespaces_from_sync_filters(self, sync_filters: list) -> set: return namespaces def load_namespaces(self, sync_filters: Optional[list] = None): - """Load Namespace DiffSync model.""" + """Load Namespace DiffSync model. + + Args: + sync_filters (list): Sync filters containing sync rules + """ + if self.job.debug: + self.job.logger.debug("Loading Namespaces from Nautobot.") namespace_names = None if sync_filters: namespace_names = self._get_namespaces_from_sync_filters(sync_filters) @@ -209,8 +219,16 @@ def _load_all_prefixes_filtered(self, sync_filters: list, include_ipv4: bool, in return all_prefixes - def load_prefixes(self, include_ipv4: bool, include_ipv6: bool, sync_filters: Optional[list]): - """Load Prefixes from Nautobot.""" + def load_prefixes(self, include_ipv4: bool, include_ipv6: bool, sync_filters: list): + """Load Prefixes from Nautobot. + + Args: + sync_filters (list): List of dicts, each dict is a single sync filter definition + include_ipv4 (bool): Whether to include IPv4 prefixes + include_ipv6 (bool): Whether to include IPv6 prefixes + """ + if self.job.debug: + self.job.logger.debug("Loading Prefixes from Nautobot.") all_prefixes = self._load_all_prefixes_filtered( sync_filters=sync_filters, include_ipv4=include_ipv4, include_ipv6=include_ipv6 ) @@ -273,7 +291,15 @@ def _load_all_ipaddresses_filtered(self, sync_filters: list, include_ipv4: bool, return all_ipaddresses def load_ipaddresses(self, include_ipv4: bool, include_ipv6: bool, sync_filters: list): - """Load IP Addresses from Nautobot.""" + """Load IP Addresses from Nautobot. + + Args: + sync_filters (list): List of dicts, each dict is a single sync filter definition + include_ipv4 (bool): Whether to include IPv4 IP addresses + include_ipv6 (bool): Whether to include IPv6 addresses + """ + if self.job.debug: + self.job.logger.debug("Loading IP Addresses from Nautobot.") default_cfs = get_default_custom_fields( cf_contenttype=ContentType.objects.get_for_model(IPAddress), excluded_cfs=self.excluded_cfs ) @@ -281,7 +307,6 @@ def load_ipaddresses(self, include_ipv4: bool, include_ipv6: bool, sync_filters: sync_filters=sync_filters, include_ipv4=include_ipv4, include_ipv6=include_ipv6 ) for ipaddr in all_ipaddresses: - self.ipaddr_map[str(ipaddr.address)] = ipaddr.id addr = ipaddr.host prefix = ipaddr.parent @@ -300,13 +325,14 @@ def load_ipaddresses(self, include_ipv4: bool, include_ipv6: bool, sync_filters: ) continue - mac_address = None + # Infoblox fixed address records are of type DHCP. Only Nautobot IP addresses of type DHCP will trigger fixed address creation logic. 
+ has_fixed_address = False + mac_address = ipaddr.custom_field_data.get("mac_address") if ipaddr.type == IPAddressTypeChoices.TYPE_DHCP: if self.config.fixed_address_type == FixedAddressTypeChoices.MAC_ADDRESS and mac_address: - _ip.has_fixed_address = True + has_fixed_address = True elif self.config.fixed_address_type == FixedAddressTypeChoices.RESERVED: - _ip.has_fixed_address = True - mac_address = ipaddr.custom_field_data.get("mac_address") + has_fixed_address = True custom_fields = get_valid_custom_fields(ipaddr.custom_field_data, excluded_cfs=self.excluded_cfs) _ip = self.ipaddress( @@ -321,11 +347,11 @@ def load_ipaddresses(self, include_ipv4: bool, include_ipv6: bool, sync_filters: ext_attrs={**default_cfs, **custom_fields}, mac_address=mac_address, pk=ipaddr.id, + has_fixed_address=has_fixed_address, ) # Pretend IP Address has matching DNS records if dns name is defined. # This will be compared against values set on Infoblox side. - if ipaddr.dns_name: if self.config.dns_record_type == DNSRecordTypeChoices.HOST_RECORD: _ip.has_host_record = True @@ -335,14 +361,6 @@ def load_ipaddresses(self, include_ipv4: bool, include_ipv6: bool, sync_filters: _ip.has_a_record = True _ip.has_ptr_record = True - # if ipaddr.dns_name: - # if self.config.create_host_record: - # _ip.has_host_record = True - # elif self.config.create_a_record: - # _ip.has_a_record = True - # if self.config.create_ptr_record: - # _ip.has_ptr_record = True - try: self.add(_ip) except ObjectAlreadyExists: @@ -350,6 +368,8 @@ def load_ipaddresses(self, include_ipv4: bool, include_ipv6: bool, sync_filters: def load_vlangroups(self): """Load VLAN Groups from Nautobot.""" + if self.job.debug: + self.job.logger.debug("Loading VLAN Groups from Nautobot.") default_cfs = get_default_custom_fields( cf_contenttype=ContentType.objects.get_for_model(VLANGroup), excluded_cfs=self.excluded_cfs ) @@ -366,6 +386,8 @@ def load_vlangroups(self): def load_vlans(self): """Load VLANs from Nautobot.""" + if self.job.debug: + self.job.logger.debug("Loading VLANs from Nautobot.") default_cfs = get_default_custom_fields( cf_contenttype=ContentType.objects.get_for_model(VLAN), excluded_cfs=self.excluded_cfs ) From 11d25d2b65c2ec34e0841d12ff39bb09fc130cff Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Wed, 5 Jun 2024 14:23:58 +0100 Subject: [PATCH 093/229] IP Address update refactor. Logic fixes. Add debug msgs. 
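
The refactor moves fixed-address handling into _update_fixed_address() and the Host/A/PTR
compatibility checks into _check_for_incompatible_record_types(), leaving update() as a
small dispatch over the configured DNS record type. Condensed from the hunks below (a
sketch only; name validation, payload building, and debug logging are omitted, and
config/inf_attrs stand in for self.diffsync.config and the Infoblox-side attributes):

    # Decide per record type whether to create or update, based on what
    # already exists on the Infoblox side for this IP address.
    a_record_action = ptr_record_action = host_record_action = "none"
    if config.dns_record_type == DNSRecordTypeChoices.A_RECORD:
        a_record_action = "update" if inf_attrs["has_a_record"] else "create"
    elif config.dns_record_type == DNSRecordTypeChoices.A_AND_PTR_RECORD:
        a_record_action = "update" if inf_attrs["has_a_record"] else "create"
        ptr_record_action = "update" if inf_attrs["has_ptr_record"] else "create"
    elif config.dns_record_type == DNSRecordTypeChoices.HOST_RECORD:
        host_record_action = "update" if inf_attrs["has_host_record"] else "create"
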
--- .../infoblox/diffsync/models/infoblox.py | 292 +++++++++++------- 1 file changed, 172 insertions(+), 120 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py index 93f4d5fc8..d874fe1a5 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py @@ -1,8 +1,9 @@ """Infoblox Models for Infoblox integration with SSoT app.""" from requests.exceptions import HTTPError + from nautobot_ssot.integrations.infoblox.choices import DNSRecordTypeChoices, FixedAddressTypeChoices -from nautobot_ssot.integrations.infoblox.diffsync.models.base import Namespace, Network, IPAddress, Vlan, VlanView +from nautobot_ssot.integrations.infoblox.diffsync.models.base import IPAddress, Namespace, Network, Vlan, VlanView from nautobot_ssot.integrations.infoblox.utils.diffsync import map_network_view_to_namespace, validate_dns_name @@ -14,13 +15,18 @@ def create(cls, diffsync, ids, attrs): """Create Network object in Infoblox.""" status = attrs.get("status") network = ids["network"] + network_view = ids["namespace"] try: if status != "container": - diffsync.conn.create_network(prefix=network, comment=attrs.get("description", "")) + diffsync.conn.create_network( + prefix=network, comment=attrs.get("description", ""), network_view=network_view + ) else: - diffsync.conn.create_network_container(prefix=network, comment=attrs.get("description", "")) + diffsync.conn.create_network_container( + prefix=network, comment=attrs.get("description", ""), network_view=network_view + ) except HTTPError as err: - diffsync.job.logger.warning(f"Failed to create {ids['network']} due to {err.response.text}") + diffsync.job.logger.warning(f"Failed to create {network}-{network_view} due to {err.response.text}") dhcp_ranges = attrs.get("ranges") if dhcp_ranges: for dhcp_range in dhcp_ranges: @@ -30,54 +36,37 @@ def create(cls, diffsync, ids, attrs): prefix=network, start=start.strip(), end=end.strip(), + network_view=network_view, ) except HTTPError as err: - diffsync.job.logger.warning(f"Failed to create {dhcp_range} due to {err.response.text}") + diffsync.job.logger.warning( + f"Failed to create {dhcp_range}-{network_view} due to {err.response.text}" + ) return super().create(ids=ids, diffsync=diffsync, attrs=attrs) def update(self, attrs): """Update Network object in Infoblox.""" self.diffsync.conn.update_network( - prefix=self.get_identifiers()["network"], comment=attrs.get("description", "") + prefix=self.get_identifiers()["network"], + network_view=self.get_identifiers()["namespace"], + comment=attrs.get("description", ""), ) if attrs.get("ranges"): self.diffsync.job.logger.warning( - f"Prefix, {self.network}, has a change of Ranges in Nautobot, but" + f"Prefix, {self.network}-{self.namespace}, has a change of Ranges in Nautobot, but" " updating Ranges in InfoBlox is currently not supported." 
) return super().update(attrs) - # def delete(self): - # """Delete Network object in Infoblox.""" - # self.diffsync.conn.delete_network(self.get_identifiers()["network"]) - # return super().delete() - class InfobloxVLANView(VlanView): """Infoblox implementation of the VLANView Model.""" - # @classmethod - # def create(cls, diffsync, ids, attrs): - # """Create VLANView object in Infoblox.""" - # diffsync.conn.create_vlan( - # vlan_id=ids["vid"], - # vlan_name=attrs["vlan_name"], - # vlan_view=attrs["vlangroup"] if attrs.get("vlangroup") else "nautobot", - # ) - # return super().create(ids=ids, diffsync=diffsync, attrs=attrs) - class InfobloxVLAN(Vlan): """Infoblox implementation of the VLAN Model.""" -# @classmethod -# def create(cls, diffsync, ids, attrs): -# """Create VLAN object in Infoblox.""" -# diffsync.conn.create_vlan_view(name=ids.name) -# return super().create(ids=ids, diffsync=diffsync, attrs=attrs) - - class InfobloxIPAddress(IPAddress): """Infoblox implementation of the VLAN Model.""" @@ -95,41 +84,40 @@ def create(cls, diffsync, ids, attrs): name = attrs.get("description") mac_address = attrs.get("mac_address") + # Used DNS name for fixed address name if it exists. Otherwise use description. if dns_name: - fa_name = dns_name + fixed_address_name = dns_name else: - fa_name = name + fixed_address_name = name if diffsync.config.fixed_address_type == FixedAddressTypeChoices.RESERVED: - # if diffsync.config.create_ip_reservation: diffsync.conn.create_fixed_address( - ip_address=ip_address, name=fa_name, match_client="RESERVED", network_view=network_view + ip_address=ip_address, name=fixed_address_name, match_client="RESERVED", network_view=network_view ) diffsync.job.logger.debug( - "Created fixed address reservation, address: %s, name: %s, network_view %s", + "Created fixed address reservation, address: %s, name: %s, network_view: %s", ip_address, - fa_name, + fixed_address_name, network_view, ) elif diffsync.config.fixed_address_type == FixedAddressTypeChoices.MAC_ADDRESS and mac_address: diffsync.conn.create_fixed_address( ip_address=ip_address, - name=fa_name, + name=fixed_address_name, mac_address=mac_address, match_client="MAC_ADDRESS", network_view=network_view, ) diffsync.job.logger.debug( - "Created fixed address with MAC, address: %s, name: %s, mac address: %s, network_view %s", + "Created fixed address with MAC, address: %s, name: %s, mac address: %s, network_view: %s", ip_address, - fa_name, + fixed_address_name, mac_address, network_view, ) # DNS record not needed, we can return if diffsync.config.dns_record_type == DNSRecordTypeChoices.DONT_CREATE_RECORD: - # if not(diffsync.conn.create_a_record or diffsync.config.create_host_record): return super().create(ids=ids, diffsync=diffsync, attrs=attrs) if not dns_name: @@ -140,63 +128,82 @@ def create(cls, diffsync, ids, attrs): # Nautobot side doesn't check if dns name is a FQDN. Additionally, Infoblox won't accept DNS name if the corresponding zone FQDN doesn't exist. 
if not validate_dns_name(diffsync.conn, dns_name, network_view): - diffsync.job.logger.warning(f"Invalid zone fqdn in DNS name `{dns_name}` for IP Address {ip_address}") + diffsync.job.logger.warning(f"Invalid zone fqdn in DNS name `{dns_name}` for IP Address {ip_address}.") return super().create(ids=ids, diffsync=diffsync, attrs=attrs) if diffsync.config.dns_record_type == DNSRecordTypeChoices.A_RECORD: - # if diffsync.config.create_a_record and attrs.get("has_a_record"): diffsync.conn.create_a_record(dns_name, ip_address, network_view=network_view) + diffsync.job.logger.debug( + "Created DNS A record, address: %s, dns_name: %s, network_view: %s", + ip_address, + dns_name, + network_view, + ) elif diffsync.config.dns_record_type == DNSRecordTypeChoices.A_AND_PTR_RECORD: diffsync.conn.create_a_record(dns_name, ip_address, network_view=network_view) + diffsync.job.logger.debug( + "Created DNS A record, address: %s, dns_name: %s, network_view: %s", + ip_address, + dns_name, + network_view, + ) diffsync.conn.create_ptr_record(dns_name, ip_address, network_view=network_view) - # Only create PTR records if A record has been created - # if diffsync.config.create_ptr_record and attrs.get("has_ptr_record"): - # diffsync.conn.create_ptr_record(dns_name, ip_address, network_view=network_view) + diffsync.job.logger.debug( + "Created DNS PTR record, address: %s, dns_name: %s, network_view: %s", + ip_address, + dns_name, + network_view, + ) elif diffsync.config.dns_record_type == DNSRecordTypeChoices.HOST_RECORD: - # elif diffsync.config.create_host_record and attrs.get("has_host_record"): diffsync.conn.create_host_record(dns_name, ip_address, network_view=network_view) + diffsync.job.logger.debug( + "Created DNS Host record, address: %s, dns_name: %s, network_view: %s", + ip_address, + dns_name, + network_view, + ) return super().create(ids=ids, diffsync=diffsync, attrs=attrs) - def update(self, attrs): # pylint: disable=too-many-branches - """Update IP Address object in Infoblox.""" - ids = self.get_identifiers() - inf_attrs = self.get_attrs() - ip_address = ids["address"] - new_dns_name = attrs.get("dns_name") - description = attrs.get("description") - network_view = map_network_view_to_namespace(value=ids["namespace"], direction="ns_to_nv") - mac_address = attrs.get("mac_address") - - self.diffsync.job.logger.warning(f"attrs {attrs}, config {self.diffsync.config.dns_record_type}") - fa_update_data = {} + def _update_fixed_address(self, new_attrs: dict, inf_attrs: dict, ip_address: str, network_view: str) -> None: + """Updates fixed address record in Infoblox. - # new dns_name == "" - dns name erased - # replace it with new description - # if no new description - use existing description + Args: + new_attrs: Object attributes changed in Nautobot + inf_attrs: Infoblox object attributes + ip_address: IP address of the fixed address + network_view: Network View of the fixed address + """ + new_dns_name = new_attrs.get("dns_name") + new_description = new_attrs.get("description") + mac_address = new_attrs.get("mac_address") - # DNS name updated in Nautobot + fa_update_data = {} + # Fixed Address name uses DNS Name if it's defined, then description. if new_dns_name: fa_update_data["name"] = new_dns_name + # DNS name cleared on Nautobot side if new_dns_name == "": - if description: - fa_update_data["name"] = description + # Description updated on Nautobot side + if new_description: + fa_update_data["name"] = new_description + # Nautobot description not updated. 
Copy Infoblox description over to the name attribute. elif inf_attrs.get("description"): fa_update_data["name"] = inf_attrs.get("description") - if description: - fa_update_data["comment"] = description + if new_description: + fa_update_data["comment"] = new_description - self.diffsync.job.logger.warning( - f"fa_update_data {fa_update_data}, fixed_address_type: {self.fixed_address_type}, mac_address {mac_address}, fixed_address_ref {self.fixed_address_ref}" - ) if ( self.diffsync.config.fixed_address_type == FixedAddressTypeChoices.RESERVED and self.fixed_address_type == "RESERVED" and fa_update_data ): - self.diffsync.job.logger.warning(f"Updating RESERVED") - # if self.diffsync.config.create_ip_reservation and inf_attrs["has_fixed_address"] and description: self.diffsync.conn.update_fixed_address(ref=self.fixed_address_ref, data=fa_update_data) - # Do not allow updating MAC address + self.diffsync.job.logger.debug( + "Updated fixed address reservation, address: %s, network_view: %s, update data: %s", + ip_address, + network_view, + fa_update_data, + ) elif ( self.diffsync.config.fixed_address_type == FixedAddressTypeChoices.MAC_ADDRESS and self.fixed_address_type == "MAC_ADDRESS" @@ -204,37 +211,30 @@ def update(self, attrs): # pylint: disable=too-many-branches ): if mac_address: fa_update_data["mac"] = mac_address - self.diffsync.job.logger.warning(f"Updating MAC_ADDRESS, pyld: {fa_update_data}") self.diffsync.conn.update_fixed_address(ref=self.fixed_address_ref, data=fa_update_data) + self.diffsync.job.logger.debug( + "Updated fixed address with MAC, address: %s, network_view: %s, update data: %s", + ip_address, + network_view, + fa_update_data, + ) - # DNS record not needed, we can return - if self.diffsync.config.dns_record_type == DNSRecordTypeChoices.DONT_CREATE_RECORD: - # if not(self.diffsync.conn.create_a_record or self.diffsync.config.create_host_record): - return super().update(attrs) + def _check_for_incompatible_record_types(self, attrs: dict, inf_attrs: dict, ip_address: str): + """Checks whether requested changes to the DNS records are compatible with existing Infoblox DNS objects. - # # Changed attrs are not related to DNS record updates - # if not (dns_payload or ptr_payload): - # return super().update(attrs) - - # Nautobot side doesn't check if dns name is fqdn. Additionally, Infoblox won't allow dns name if the zone fqdn doesn't exist. - # We get either existing DNS name, or a new one. This is because name might be the same but we need to create a PTR record. - canonical_dns_name = attrs.get("dns_name", inf_attrs["dns_name"]) - if not canonical_dns_name: - self.diffsync.job.logger.warning( - f"Cannot update Infoblox record for IP Address {ip_address}. DNS name is not defined." - ) - return super().update(attrs) - if not validate_dns_name(self.diffsync.conn, canonical_dns_name, network_view): - self.diffsync.job.logger.warning( - f"Invalid zone fqdn in DNS name `{canonical_dns_name}` for IP Address {ip_address}" - ) - return super().update(attrs) + Args: + attrs: Changed Nautobot object attributes + inf_attrs: Infoblox objects attributes + ip_address: IP address of the record + Returns: + tuple (bool, str) + """ # Infoblox Host record acts as a combined A/PTR record. # Only allow creating/updating A and PTR record if IP Address doesn't have a corresponding Host record. # Only allows creating/updating Host record if IP Address doesn't have a corresponding A or PTR record. 
incompatible_record_types = False - # if attrs.get("has_a_record", False) and self.diffsync.config.create_a_record and inf_attrs["has_host_record"]: + incomp_msg = "" if ( attrs.get("has_a_record", False) and self.diffsync.config.dns_record_type == DNSRecordTypeChoices.A_RECORD @@ -244,7 +244,6 @@ def update(self, attrs): # pylint: disable=too-many-branches incompatible_record_types = True elif ( attrs.get("has_ptr_record", False) - # and self.diffsync.config.create_ptr_record and self.diffsync.config.dns_record_type == DNSRecordTypeChoices.A_AND_PTR_RECORD and inf_attrs["has_host_record"] ): @@ -254,7 +253,6 @@ def update(self, attrs): # pylint: disable=too-many-branches incompatible_record_types = True elif ( attrs.get("has_host_record", False) - # and self.diffsync.config.create_host_record and self.diffsync.config.dns_record_type == DNSRecordTypeChoices.HOST_RECORD and inf_attrs["has_a_record"] ): @@ -262,7 +260,6 @@ def update(self, attrs): # pylint: disable=too-many-branches incompatible_record_types = True elif ( attrs.get("has_host_record", False) - # and self.diffsync.config.create_host_record and self.diffsync.config.dns_record_type == DNSRecordTypeChoices.HOST_RECORD and inf_attrs["has_ptr_record"] ): @@ -271,64 +268,119 @@ def update(self, attrs): # pylint: disable=too-many-branches ) incompatible_record_types = True + return incompatible_record_types, incomp_msg + + def update(self, attrs): # pylint: disable=too-many-branches,too-many-locals,too-many-statements + """Update IP Address object in Infoblox.""" + ids = self.get_identifiers() + inf_attrs = self.get_attrs() + ip_address = ids["address"] + network_view = map_network_view_to_namespace(value=ids["namespace"], direction="ns_to_nv") + + # Update fixed address + if inf_attrs.get("has_fixed_address"): + self._update_fixed_address( + new_attrs=attrs, inf_attrs=inf_attrs, ip_address=ip_address, network_view=network_view + ) + + # DNS record not needed, we can return + if self.diffsync.config.dns_record_type == DNSRecordTypeChoices.DONT_CREATE_RECORD: + return super().update(attrs) + + # Nautobot side doesn't check if dns name is a fqdn. Additionally, Infoblox won't allow dns name if the zone fqdn doesn't exist. + # We get either existing DNS name, or a new one. This is because name might be the same but we need to create a PTR record. + canonical_dns_name = attrs.get("dns_name", inf_attrs["dns_name"]) + if not canonical_dns_name: + self.diffsync.job.logger.warning( + f"Cannot update Infoblox record for IP Address {ip_address}. DNS name is not defined." 
+ ) + return super().update(attrs) + if not validate_dns_name(self.diffsync.conn, canonical_dns_name, network_view): + self.diffsync.job.logger.warning( + f"Invalid zone fqdn in DNS name `{canonical_dns_name}` for IP Address {ip_address}" + ) + return super().update(attrs) + + incompatible_record_types, incomp_msg = self._check_for_incompatible_record_types( + attrs=attrs, inf_attrs=inf_attrs, ip_address=ip_address + ) if incompatible_record_types: self.diffsync.job.logger.warning(incomp_msg) return super().update(attrs) dns_payload = {} ptr_payload = {} - if description: - dns_payload.update({"comment": description}) - ptr_payload.update({"comment": description}) + new_description = attrs.get("description") + if new_description: + dns_payload.update({"comment": new_description}) + ptr_payload.update({"comment": new_description}) if attrs.get("dns_name"): dns_payload.update({"name": attrs.get("dns_name")}) ptr_payload.update({"ptrdname": attrs.get("dns_name")}) - self.diffsync.job.logger.warning( - f"config {self.diffsync.config.dns_record_type}, {self.diffsync.config.dns_record_type == DNSRecordTypeChoices.HOST_RECORD}" - ) - a_record_action = "none" - ptr_record_action = "none" - host_record_action = "none" - # if self.diffsync.config.create_a_record and inf_attrs["has_a_record"]: + a_record_action = ptr_record_action = host_record_action = "none" if self.diffsync.config.dns_record_type == DNSRecordTypeChoices.A_RECORD: a_record_action = "update" if inf_attrs["has_a_record"] else "create" elif self.diffsync.config.dns_record_type == DNSRecordTypeChoices.A_AND_PTR_RECORD: - self.diffsync.job.logger.warning( - f"config {self.diffsync.config.dns_record_type}, {self.diffsync.config.dns_record_type == DNSRecordTypeChoices.A_AND_PTR_RECORD}" - ) a_record_action = "update" if inf_attrs["has_a_record"] else "create" ptr_record_action = "update" if inf_attrs["has_ptr_record"] else "create" - # if self.diffsync.config.create_host_record and inf_attrs["has_host_record"]: elif self.diffsync.config.dns_record_type == DNSRecordTypeChoices.HOST_RECORD: host_record_action = "update" if inf_attrs["has_host_record"] else "create" # IP Address in Infoblox is not a plain IP Address like in Nautobot. - # In Infoblox we can fixed_address (not supported here), Host record for IP Address, or A Record for IP Address. + # In Infoblox we can have Fixed Address, Host record for IP Address, or A Record for IP Address. # When syncing from Nautobot to Infoblox we take IP Address and check if it has dns_name field populated. # We then combine this with the Infoblox Config toggles to arrive at the desired state in Infoblox. 
- self.diffsync.job.logger.warning(f"host_record_action {host_record_action}, dns_payload {dns_payload}") - self.diffsync.job.logger.warning(f"ptr_record_action {ptr_record_action}, dns_payload {dns_payload}") - self.diffsync.job.logger.warning(f"inf_attrs {inf_attrs}") if host_record_action == "update" and dns_payload: self.diffsync.conn.update_host_record(ref=self.host_record_ref, data=dns_payload) + self.diffsync.job.logger.debug( + "Updated Host record, address: %s, network_view: %s, update data: %s", + ip_address, + network_view, + dns_payload, + ) elif host_record_action == "create": self.diffsync.conn.create_host_record(canonical_dns_name, ip_address, network_view=network_view) + self.diffsync.job.logger.debug( + "Created Host record, address: %s, network_view: %s, DNS name: %s", + ip_address, + network_view, + canonical_dns_name, + ) if a_record_action == "update" and dns_payload: self.diffsync.conn.update_a_record(ref=self.a_record_ref, data=dns_payload) + self.diffsync.job.logger.debug( + "Updated A record, address: %s, network_view: %s, update data: %s", + ip_address, + network_view, + dns_payload, + ) elif a_record_action == "create": self.diffsync.conn.create_a_record(canonical_dns_name, ip_address, network_view=network_view) + self.diffsync.job.logger.debug( + "Created A record, address: %s, network_view: %s, DNS name: %s", + ip_address, + network_view, + canonical_dns_name, + ) if ptr_record_action == "update" and ptr_payload: self.diffsync.conn.update_ptr_record(ref=self.ptr_record_ref, data=ptr_payload) + self.diffsync.job.logger.debug( + "Updated PTR record, address: %s, network_view: %s, update data: %s", + ip_address, + network_view, + ptr_payload, + ) elif ptr_record_action == "create": self.diffsync.conn.create_ptr_record(canonical_dns_name, ip_address, network_view=network_view) + self.diffsync.job.logger.debug( + "Created PTR record, address: %s, network_view: %s, DNS name: %s", + ip_address, + network_view, + canonical_dns_name, + ) return super().update(attrs) - # def delete(self): - # """Delete an IP Address from Infoblox.""" - # self.diffsync.conn.delete_host_record(self.get_identifiers()["address"]) - # return super().delete() - class InfobloxNamespace(Namespace): """Infoblox implementation of the Namespace model.""" From 2ab590426388c38b6ef3af478f7127c4acb3a526 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Wed, 5 Jun 2024 14:24:53 +0100 Subject: [PATCH 094/229] Tag imported namespace. Tidy up. 
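
Namespaces created from Infoblox data now receive the tag returned by
create_tag_sync_from_infoblox() before being saved, so imported objects can be identified
later. The relevant ordering, as in the hunk below:

    # Tag the namespace as synced from Infoblox, then save it.
    _ns.tags.add(create_tag_sync_from_infoblox())
    _ns.validated_save()
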
--- .../infoblox/diffsync/models/nautobot.py | 24 +++++-------------- 1 file changed, 6 insertions(+), 18 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py b/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py index 46b4f45f8..5fc3e80f0 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py @@ -4,15 +4,16 @@ from django.core.exceptions import ValidationError from django.utils.text import slugify from nautobot.extras.choices import CustomFieldTypeChoices -from nautobot.extras.models import RelationshipAssociation as OrmRelationshipAssociation from nautobot.extras.models import CustomField as OrmCF +from nautobot.extras.models import RelationshipAssociation as OrmRelationshipAssociation from nautobot.ipam.choices import IPAddressRoleChoices, IPAddressTypeChoices +from nautobot.ipam.models import VLAN as OrmVlan from nautobot.ipam.models import IPAddress as OrmIPAddress +from nautobot.ipam.models import Namespace as OrmNamespace from nautobot.ipam.models import Prefix as OrmPrefix -from nautobot.ipam.models import VLAN as OrmVlan from nautobot.ipam.models import VLANGroup as OrmVlanGroup -from nautobot.ipam.models import Namespace as OrmNamespace -from nautobot_ssot.integrations.infoblox.diffsync.models.base import Namespace, Network, IPAddress, Vlan, VlanView + +from nautobot_ssot.integrations.infoblox.diffsync.models.base import IPAddress, Namespace, Network, Vlan, VlanView from nautobot_ssot.integrations.infoblox.utils.diffsync import ( create_tag_sync_from_infoblox, map_network_view_to_namespace, @@ -198,13 +199,6 @@ def update(self, attrs): # pylint: disable=too-many-branches _pf.validated_save() return super().update(attrs) - # def delete(self): - # """Delete Prefix object in Nautobot.""" - # self.diffsync.job.logger.warning(f"Prefix {self.network} will be deleted.") - # _prefix = OrmPrefix.objects.get(id=self.pk) - # _prefix.delete() - # return super().delete() - class NautobotIPAddress(IPAddress): """Nautobot implementation of the IPAddress Model.""" @@ -276,13 +270,6 @@ def update(self, attrs): self.diffsync.job.logger.warning(f"Error with updating IP Address {self.address}. {err}") return None - # def delete(self): - # """Delete IPAddress object in Nautobot.""" - # self.diffsync.job.logger.warning(f"IP Address {self.address} will be deleted.") - # _ipaddr = OrmIPAddress.objects.get(id=self.pk) - # _ipaddr.delete() - # return super().delete() - class NautobotVlanGroup(VlanView): """Nautobot implementation of the VLANView model.""" @@ -390,6 +377,7 @@ def create(cls, diffsync, ids, attrs): if attrs.get("ext_attrs"): process_ext_attrs(diffsync=diffsync, obj=_ns, extattrs=attrs["ext_attrs"]) try: + _ns.tags.add(create_tag_sync_from_infoblox()) _ns.validated_save() diffsync.namespace_map[ids["name"]] = _ns.id return super().create(ids=ids, diffsync=diffsync, attrs=attrs) From 7c5b28c6f6187df2403a77de79f3588a06e81796 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Wed, 5 Jun 2024 14:25:23 +0100 Subject: [PATCH 095/229] Remove print msg. 
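
The removed print() was leftover debug output; the assertEqual on the following line
already verifies next_ip, so the test loses nothing.
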
--- nautobot_ssot/tests/infoblox/test_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/tests/infoblox/test_client.py b/nautobot_ssot/tests/infoblox/test_client.py index f636ec713..424a325d7 100644 --- a/nautobot_ssot/tests/infoblox/test_client.py +++ b/nautobot_ssot/tests/infoblox/test_client.py @@ -569,7 +569,7 @@ def test_find_next_available_ip_success(self, mock_find_network_reference): req.post(f"{LOCALHOST}/{mock_uri}", json=mock_response, status_code=200) next_ip = self.infoblox_client.find_next_available_ip(test_network) - print(next_ip) + self.assertEqual(next_ip, "10.220.0.1") @patch("nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi._find_network_reference") From a053effbe180ba3bf15299a5578cd08ec3b0c72c Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Wed, 5 Jun 2024 14:26:05 +0100 Subject: [PATCH 096/229] Add full test coverage for IP Address create and update. --- .../tests/infoblox/test_infoblox_models.py | 1352 +++++++++++++++-- 1 file changed, 1214 insertions(+), 138 deletions(-) diff --git a/nautobot_ssot/tests/infoblox/test_infoblox_models.py b/nautobot_ssot/tests/infoblox/test_infoblox_models.py index e71fd56fe..347c32fd2 100644 --- a/nautobot_ssot/tests/infoblox/test_infoblox_models.py +++ b/nautobot_ssot/tests/infoblox/test_infoblox_models.py @@ -1,5 +1,5 @@ +# pylint: disable=too-many-lines,too-many-public-methods """Unit tests for the Infoblox Diffsync models.""" - import unittest from unittest.mock import Mock @@ -46,7 +46,7 @@ def setUp(self): "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_create_nothing_get_created(self, mock_tag_involved_objects, mock_validate_dns_name): + def test_ip_address_create_nothing_gets_created(self, mock_tag_involved_objects, mock_validate_dns_name): """Validate nothing gets created if user selects DONT_CREATE_RECORD for DNS and Fixed Address options.""" nb_ipaddress_atrs = {"dns_name": "server1.local.test.net", "mac_address": "52:1f:83:d4:9a:2e"} nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) @@ -55,7 +55,6 @@ def test_ip_address_create_nothing_get_created(self, mock_tag_involved_objects, with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - # self.config.create_a_record = True self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) @@ -91,7 +90,6 @@ def test_ip_address_create_a_record(self, mock_tag_involved_objects, mock_valida with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - # self.config.create_a_record = True self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) @@ -211,7 +209,7 @@ def test_ip_address_create_host_record(self, mock_tag_involved_objects, mock_val autospec=True, ) def test_ip_address_create_no_dns_name(self, mock_tag_involved_objects, mock_validate_dns_name): - """Ensure no record is created if DNS name is missing.""" + """Ensure DNS record is not created if DNS name is missing.""" nb_ipaddress_atrs = {"has_a_record": True, "dns_name": ""} nb_ds_ipaddress = 
self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) @@ -236,6 +234,51 @@ def test_ip_address_create_no_dns_name(self, mock_tag_involved_objects, mock_val mock_tag_involved_objects.assert_called_once() mock_validate_dns_name.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.create_host_record.assert_not_called() + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=False, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_create_invalid_dns_name(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure DNS record is not created if DNS name is invalid.""" + nb_ipaddress_atrs = {"has_a_record": True, "dns_name": ".invalid-dns-name"} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + infoblox_adapter.job = Mock() + job_logger = Mock() + infoblox_adapter.job.logger = job_logger + self.nb_adapter.sync_to(infoblox_adapter) + log_msg = "Invalid zone fqdn in DNS name `.invalid-dns-name` for IP Address 10.0.0.1." 
+ job_logger.warning.assert_called_with(log_msg) + + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name=".invalid-dns-name", network_view="default" + ) + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.create_host_record.assert_not_called() @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", @@ -295,7 +338,6 @@ def test_ip_address_create_fixed_address_reserved_no_dns_name( with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - # self.config.create_a_record = True self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) @@ -334,7 +376,6 @@ def test_ip_address_create_fixed_address_mac(self, mock_tag_involved_objects, mo with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - # self.config.create_a_record = True self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) @@ -377,7 +418,6 @@ def test_ip_address_create_fixed_address_mac_no_dns_name(self, mock_tag_involved with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - # self.config.create_a_record = True self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) @@ -422,7 +462,6 @@ def test_ip_address_create_fixed_address_reserved_with_host_record( with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - # self.config.create_a_record = True self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) @@ -469,7 +508,6 @@ def test_ip_address_create_fixed_address_reserved_with_a_record( with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - # self.config.create_a_record = True self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) @@ -516,7 +554,6 @@ def test_ip_address_create_fixed_address_reserved_with_a_and_ptr_record( with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - # self.config.create_a_record = True self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) @@ -707,6 +744,10 @@ def setUp(self): self.nb_adapter = NautobotAdapter(config=self.config) self.nb_adapter.job = Mock() + ############ + # TEST Fixed Address record updates + ########### + @unittest.mock.patch( 
"nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", autospec=True, @@ -719,7 +760,7 @@ def setUp(self): def test_ip_address_update_fixed_address_type_reserved_name_and_comment( self, mock_tag_involved_objects, mock_validate_dns_name ): - """Ensure Fixed Address type RESERVED is updated.""" + """Ensure Fixed Address type RESERVED has DNS name and comment updated.""" nb_ipaddress_atrs = { "dns_name": "server2.local.test.net", "has_fixed_address": True, @@ -755,7 +796,9 @@ def test_ip_address_update_fixed_address_type_reserved_name_and_comment( infoblox_adapter.conn.update_fixed_address.assert_called_with( ref="fixedaddress/xyz", data={"name": "server2.local.test.net", "comment": "new description"} ) + infoblox_adapter.conn.create_host_record.assert_not_called() infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() infoblox_adapter.conn.update_a_record.assert_not_called() infoblox_adapter.conn.create_ptr_record.assert_not_called() infoblox_adapter.conn.update_ptr_record.assert_not_called() @@ -806,7 +849,9 @@ def test_ip_address_update_fixed_address_type_reserved_description_used_for_name infoblox_adapter.conn.update_fixed_address.assert_called_with( ref="fixedaddress/xyz", data={"name": "new description", "comment": "new description"} ) + infoblox_adapter.conn.create_host_record.assert_not_called() infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() infoblox_adapter.conn.update_a_record.assert_not_called() infoblox_adapter.conn.create_ptr_record.assert_not_called() infoblox_adapter.conn.update_ptr_record.assert_not_called() @@ -822,9 +867,11 @@ def test_ip_address_update_fixed_address_type_reserved_description_used_for_name "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_host_record(self, mock_tag_involved_objects, mock_validate_dns_name): - """Ensure Host record is updated.""" - nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_host_record": True} + def test_ip_address_update_fixed_address_type_reserved_old_description_used_for_name( + self, mock_tag_involved_objects, mock_validate_dns_name + ): + """Ensure Fixed Address type RESERVED is updated. 
With no DNS name and no changes to description, old description is used for name and comment.""" + nb_ipaddress_atrs = {"dns_name": "", "has_fixed_address": True, "description": "old description"} nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) self.nb_adapter.load() @@ -832,8 +879,8 @@ def test_ip_address_update_host_record(self, mock_tag_involved_objects, mock_val with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD - self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( @@ -843,24 +890,26 @@ def test_ip_address_update_host_record(self, mock_tag_involved_objects, mock_val infoblox_adapter.add(inf_ds_namespace) inf_ipaddress_atrs = { "dns_name": "server1.local.test.net", - "has_host_record": True, - "host_record_ref": "record:host/xyz", + "has_fixed_address": True, + "fixed_address_ref": "fixedaddress/xyz", + "fixed_address_type": "RESERVED", + "description": "old description", } inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) infoblox_adapter.add(inf_ds_ipaddress) self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.update_host_record.assert_called_once() - infoblox_adapter.conn.update_host_record.assert_called_with( - ref="record:host/xyz", data={"name": "server2.local.test.net"} + infoblox_adapter.conn.update_fixed_address.assert_called_once() + infoblox_adapter.conn.update_fixed_address.assert_called_with( + ref="fixedaddress/xyz", data={"name": "old description"} ) + infoblox_adapter.conn.create_host_record.assert_not_called() + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() infoblox_adapter.conn.update_a_record.assert_not_called() infoblox_adapter.conn.create_ptr_record.assert_not_called() infoblox_adapter.conn.update_ptr_record.assert_not_called() mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_called_once() - mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" - ) + mock_validate_dns_name.assert_not_called() @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", @@ -871,9 +920,15 @@ def test_ip_address_update_host_record(self, mock_tag_involved_objects, mock_val "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_a_record(self, mock_tag_involved_objects, mock_validate_dns_name): - """Ensure A record is updated.""" - nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_a_record": True} + def test_ip_address_update_fixed_address_type_mac_update_mac( + self, mock_tag_involved_objects, mock_validate_dns_name + ): + """Ensure Fixed Address type MAC has MAC address updated.""" + nb_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "has_fixed_address": True, + "mac_address": "52:1f:83:d4:9a:ab", + } nb_ds_ipaddress = 
self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) self.nb_adapter.load() @@ -881,8 +936,8 @@ def test_ip_address_update_a_record(self, mock_tag_involved_objects, mock_valida with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD - self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD + self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS + self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( @@ -892,24 +947,26 @@ def test_ip_address_update_a_record(self, mock_tag_involved_objects, mock_valida infoblox_adapter.add(inf_ds_namespace) inf_ipaddress_atrs = { "dns_name": "server1.local.test.net", - "has_a_record": True, - "a_record_ref": "record:a/xyz", + "has_fixed_address": True, + "fixed_address_ref": "fixedaddress/xyz", + "fixed_address_type": "MAC_ADDRESS", + "mac_address": "52:1f:83:d4:9a:2e", } inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) infoblox_adapter.add(inf_ds_ipaddress) self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.update_a_record.assert_called_once() - infoblox_adapter.conn.update_a_record.assert_called_with( - ref="record:a/xyz", data={"name": "server2.local.test.net"} + infoblox_adapter.conn.update_fixed_address.assert_called_once() + infoblox_adapter.conn.update_fixed_address.assert_called_with( + ref="fixedaddress/xyz", data={"mac": "52:1f:83:d4:9a:ab"} ) + infoblox_adapter.conn.create_host_record.assert_not_called() + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.update_a_record.assert_not_called() infoblox_adapter.conn.create_ptr_record.assert_not_called() infoblox_adapter.conn.update_ptr_record.assert_not_called() - infoblox_adapter.conn.update_host_record.assert_not_called() mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_called_once() - mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" - ) + mock_validate_dns_name.assert_not_called() @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", @@ -920,9 +977,15 @@ def test_ip_address_update_a_record(self, mock_tag_involved_objects, mock_valida "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_create_ptr_record(self, mock_tag_involved_objects, mock_validate_dns_name): - """Ensure PTR record is created if one doesn't currently exist.""" - nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_a_record": True, "has_ptr_record": True} + def test_ip_address_update_fixed_address_type_mac_name_and_comment( + self, mock_tag_involved_objects, mock_validate_dns_name + ): + """Ensure Fixed Address type MAC has DNS name and comment updated.""" + nb_ipaddress_atrs = { + "dns_name": "server2.local.test.net", + "has_fixed_address": True, + "description": "new description", + } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) self.nb_adapter.load() @@ -930,8 +993,8 @@ 
def test_ip_address_create_ptr_record(self, mock_tag_involved_objects, mock_vali with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD - self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD + self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS + self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( @@ -940,28 +1003,27 @@ def test_ip_address_create_ptr_record(self, mock_tag_involved_objects, mock_vali ) infoblox_adapter.add(inf_ds_namespace) inf_ipaddress_atrs = { - "dns_name": "server2.local.test.net", - "has_a_record": True, - "has_ptr_record": False, - "a_record_ref": "record:a/xyz", + "dns_name": "server1.local.test.net", + "has_fixed_address": True, + "fixed_address_ref": "fixedaddress/xyz", + "fixed_address_type": "MAC_ADDRESS", + "description": "old description", } - print(_get_ip_address_dict(inf_ipaddress_atrs)) inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) - print(infoblox_adapter.dict()) infoblox_adapter.add(inf_ds_ipaddress) self.nb_adapter.sync_to(infoblox_adapter) - - infoblox_adapter.conn.create_ptr_record.assert_called_once() - infoblox_adapter.conn.create_ptr_record.assert_called_with( - fqdn="server2.local.test.net", ip_address="10.0.0.1", network_view="default" + infoblox_adapter.conn.update_fixed_address.assert_called_once() + infoblox_adapter.conn.update_fixed_address.assert_called_with( + ref="fixedaddress/xyz", data={"name": "server2.local.test.net", "comment": "new description"} ) - infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.create_host_record.assert_not_called() infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_called_once() - mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" - ) + mock_validate_dns_name.assert_not_called() @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", @@ -972,9 +1034,11 @@ def test_ip_address_create_ptr_record(self, mock_tag_involved_objects, mock_vali "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_a_and_ptr_record(self, mock_tag_involved_objects, mock_validate_dns_name): - """Ensure A and PTR records are updated.""" - nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_a_record": True, "has_ptr_record": True} + def test_ip_address_update_fixed_address_type_mac_description_used_for_name( + self, mock_tag_involved_objects, mock_validate_dns_name + ): + """Ensure Fixed Address type MAC_ADDRESS is updated. 
With no DNS name, the description is used for name and comment."""
+        nb_ipaddress_atrs = {"dns_name": "", "has_fixed_address": True, "description": "new description"}
         nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs))
         self.nb_adapter.add(nb_ds_ipaddress)
         self.nb_adapter.load()
@@ -982,8 +1046,8 @@ def test_ip_address_update_a_and_ptr_record(self, mock_tag_involved_objects, moc
         with unittest.mock.patch(
             "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True
         ) as mock_client:
-            self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD
-            self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD
+            self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS
+            self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD
             infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config)
             infoblox_adapter.job = Mock()
             inf_ds_namespace = infoblox_adapter.namespace(
@@ -993,29 +1057,26 @@ def test_ip_address_update_a_and_ptr_record(self, mock_tag_involved_objects, moc
             infoblox_adapter.add(inf_ds_namespace)
             inf_ipaddress_atrs = {
                 "dns_name": "server1.local.test.net",
-                "has_a_record": True,
-                "has_ptr_record": True,
-                "a_record_ref": "record:a/xyz",
-                "ptr_record_ref": "record:ptr/xyz",
+                "has_fixed_address": True,
+                "fixed_address_ref": "fixedaddress/xyz",
+                "fixed_address_type": "MAC_ADDRESS",
+                "description": "old description",
             }
             inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs))
             infoblox_adapter.add(inf_ds_ipaddress)
             self.nb_adapter.sync_to(infoblox_adapter)
-            infoblox_adapter.conn.update_ptr_record.assert_called_once()
-            infoblox_adapter.conn.update_ptr_record.assert_called_with(
-                ref="record:ptr/xyz", data={"ptrdname": "server2.local.test.net"}
-            )
-            infoblox_adapter.conn.update_a_record.assert_called_once()
-            infoblox_adapter.conn.update_a_record.assert_called_with(
-                ref="record:a/xyz", data={"name": "server2.local.test.net"}
+            infoblox_adapter.conn.update_fixed_address.assert_called_once()
+            infoblox_adapter.conn.update_fixed_address.assert_called_with(
+                ref="fixedaddress/xyz", data={"name": "new description", "comment": "new description"}
             )
+            infoblox_adapter.conn.create_host_record.assert_not_called()
             infoblox_adapter.conn.update_host_record.assert_not_called()
+            infoblox_adapter.conn.create_a_record.assert_not_called()
+            infoblox_adapter.conn.update_a_record.assert_not_called()
             infoblox_adapter.conn.create_ptr_record.assert_not_called()
+            infoblox_adapter.conn.update_ptr_record.assert_not_called()
             mock_tag_involved_objects.assert_called_once()
-            mock_validate_dns_name.assert_called_once()
-            mock_validate_dns_name.assert_called_with(
-                infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default"
-            )
+            mock_validate_dns_name.assert_not_called()
@@ -1026,9 +1087,11 @@ def test_ip_address_update_a_and_ptr_record(self, mock_tag_involved_objects, moc
         "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects",
         autospec=True,
     )
-    def test_ip_address_update_fail_host_and_a_record(self, mock_tag_involved_objects, mock_validate_dns_name):
-        """Ensure update fails if an A record is marked for update but Infoblox already has a Host record."""
-        nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_a_record": True}
+    def
test_ip_address_update_fixed_address_type_mac_old_description_used_for_name( + self, mock_tag_involved_objects, mock_validate_dns_name + ): + """Ensure Fixed Address type MAC_ADDRESS is updated. With no DNS name and no changes to description, old description is used for name and comment.""" + nb_ipaddress_atrs = {"dns_name": "", "has_fixed_address": True, "description": "old description"} nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) self.nb_adapter.load() @@ -1036,12 +1099,10 @@ def test_ip_address_update_fail_host_and_a_record(self, mock_tag_involved_object with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD - self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD + self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS + self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) infoblox_adapter.job = Mock() - job_logger = Mock() - infoblox_adapter.job.logger = job_logger inf_ds_namespace = infoblox_adapter.namespace( name="Global", ext_attrs={}, @@ -1049,24 +1110,30 @@ def test_ip_address_update_fail_host_and_a_record(self, mock_tag_involved_object infoblox_adapter.add(inf_ds_namespace) inf_ipaddress_atrs = { "dns_name": "server1.local.test.net", - "has_host_record": True, - "host_record_ref": "record:host/xyz", + "has_fixed_address": True, + "fixed_address_ref": "fixedaddress/xyz", + "fixed_address_type": "MAC_ADDRESS", + "description": "old description", } inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) infoblox_adapter.add(inf_ds_ipaddress) self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.update_fixed_address.assert_called_once() + infoblox_adapter.conn.update_fixed_address.assert_called_with( + ref="fixedaddress/xyz", data={"name": "old description"} + ) + infoblox_adapter.conn.create_host_record.assert_not_called() infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.update_a_record.assert_not_called() infoblox_adapter.conn.create_ptr_record.assert_not_called() infoblox_adapter.conn.update_ptr_record.assert_not_called() - - log_msg = "Cannot update A Record for IP Address, 10.0.0.1. It already has an existing Host Record." 
- job_logger.warning.assert_called_with(log_msg) mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_called_once() - mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" - ) + mock_validate_dns_name.assert_not_called() + + ########################### + # DNS Record Update tests + ########################### @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", @@ -1077,9 +1144,9 @@ def test_ip_address_update_fail_host_and_a_record(self, mock_tag_involved_object "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_fail_ptr_and_host_record(self, mock_tag_involved_objects, mock_validate_dns_name): - """Ensure update fails if PTR record is marked for update but Infoblox already has a Host record.""" - nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_ptr_record": True} + def test_ip_address_update_host_record(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure only Host record is updated.""" + nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_host_record": True, "has_fixed_address": False} nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) self.nb_adapter.load() @@ -1088,11 +1155,9 @@ def test_ip_address_update_fail_ptr_and_host_record(self, mock_tag_involved_obje "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD - self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) infoblox_adapter.job = Mock() - job_logger = Mock() - infoblox_adapter.job.logger = job_logger inf_ds_namespace = infoblox_adapter.namespace( name="Global", ext_attrs={}, @@ -1102,20 +1167,22 @@ def test_ip_address_update_fail_ptr_and_host_record(self, mock_tag_involved_obje "dns_name": "server1.local.test.net", "has_host_record": True, "host_record_ref": "record:host/xyz", + "fixed_address_ref": "fixedaddress/xyz", + "fixed_address_type": "MAC_ADDRESS", } inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) infoblox_adapter.add(inf_ds_ipaddress) self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.update_host_record.assert_called_once() + infoblox_adapter.conn.update_host_record.assert_called_with( + ref="record:host/xyz", data={"name": "server2.local.test.net"} + ) + infoblox_adapter.conn.create_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() infoblox_adapter.conn.update_a_record.assert_not_called() - infoblox_adapter.conn.update_host_record.assert_not_called() infoblox_adapter.conn.create_ptr_record.assert_not_called() infoblox_adapter.conn.update_ptr_record.assert_not_called() - - log_msg = ( - "Cannot create/update PTR Record for IP Address, 10.0.0.1. It already has an existing Host Record." 
- ) - job_logger.warning.assert_called_with(log_msg) - + infoblox_adapter.conn.update_fixed_address.assert_not_called() mock_tag_involved_objects.assert_called_once() mock_validate_dns_name.assert_called_once() mock_validate_dns_name.assert_called_with( @@ -1131,9 +1198,9 @@ def test_ip_address_update_fail_ptr_and_host_record(self, mock_tag_involved_obje "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_fail_a_and_host_record(self, mock_tag_involved_objects, mock_validate_dns_name): - """Ensure update fails if Host record is marked for update but Infoblox already has an A record.""" - nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_host_record": True} + def test_ip_address_update_create_host_record(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure Host record is created during update if one doesn't exist. This can happen if fixed address currently exist and config was updated to enable host record creation.""" + nb_ipaddress_atrs = {"dns_name": "server1.local.test.net", "has_host_record": True, "has_fixed_address": False} nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) self.nb_adapter.load() @@ -1141,13 +1208,10 @@ def test_ip_address_update_fail_a_and_host_record(self, mock_tag_involved_object with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.create_host_record = True - self.config.create_a_record = False - self.config.create_ptr_record = False + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) infoblox_adapter.job = Mock() - job_logger = Mock() - infoblox_adapter.job.logger = job_logger inf_ds_namespace = infoblox_adapter.namespace( name="Global", ext_attrs={}, @@ -1155,24 +1219,27 @@ def test_ip_address_update_fail_a_and_host_record(self, mock_tag_involved_object infoblox_adapter.add(inf_ds_namespace) inf_ipaddress_atrs = { "dns_name": "server1.local.test.net", - "has_a_record": True, - "a_record_ref": "record:a/xyz", + "has_host_record": False, + "fixed_address_ref": "fixedaddress/xyz", + "fixed_address_type": "MAC_ADDRESS", } inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) infoblox_adapter.add(inf_ds_ipaddress) self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.create_host_record.assert_called_once() + infoblox_adapter.conn.create_host_record.assert_called_with( + fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + ) infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.update_a_record.assert_not_called() infoblox_adapter.conn.create_ptr_record.assert_not_called() infoblox_adapter.conn.update_ptr_record.assert_not_called() - - log_msg = "Cannot update Host Record for IP Address, 10.0.0.1. It already has an existing A Record." 
- job_logger.warning.assert_called_with(log_msg) - + infoblox_adapter.conn.update_fixed_address.assert_not_called() mock_tag_involved_objects.assert_called_once() mock_validate_dns_name.assert_called_once() mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" ) @unittest.mock.patch( @@ -1184,9 +1251,9 @@ def test_ip_address_update_fail_a_and_host_record(self, mock_tag_involved_object "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_fail_host_and_ptr_record(self, mock_tag_involved_objects, mock_validate_dns_name): - """Ensure update fails if Host record is marked for update but Infoblox already has a PTR record.""" - nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_host_record": True} + def test_ip_address_update_a_record(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure only A record is updated.""" + nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_a_record": True, "has_fixed_address": False} nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) self.nb_adapter.load() @@ -1194,13 +1261,183 @@ def test_ip_address_update_fail_host_and_ptr_record(self, mock_tag_involved_obje with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.create_host_record = True - self.config.create_a_record = False - self.config.create_ptr_record = False + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "has_a_record": True, + "a_record_ref": "record:a/xyz", + "fixed_address_ref": "fixedaddress/xyz", + "fixed_address_type": "MAC_ADDRESS", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.update_a_record.assert_called_once() + infoblox_adapter.conn.update_a_record.assert_called_with( + ref="record:a/xyz", data={"name": "server2.local.test.net"} + ) + infoblox_adapter.conn.create_host_record.assert_not_called() + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() + infoblox_adapter.conn.update_fixed_address.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + 
autospec=True, + ) + def test_ip_address_update_create_a_record(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure A record is created during update if one doesn't exist. This can happen if fixed address currently exist and config was updated to enable A record creation.""" + nb_ipaddress_atrs = {"dns_name": "server1.local.test.net", "has_a_record": True, "has_fixed_address": False} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "has_a_record": False, + "fixed_address_ref": "fixedaddress/xyz", + "fixed_address_type": "MAC_ADDRESS", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.create_a_record.assert_called_once() + infoblox_adapter.conn.create_a_record.assert_called_with( + fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + ) + infoblox_adapter.conn.create_host_record.assert_not_called() + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() + infoblox_adapter.conn.update_fixed_address.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_update_create_ptr_record(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure PTR record is created if one doesn't currently exist.""" + nb_ipaddress_atrs = { + "dns_name": "server2.local.test.net", + "has_a_record": True, + "has_ptr_record": True, + "has_fixed_address": False, + } + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "dns_name": 
"server2.local.test.net", + "has_a_record": True, + "has_ptr_record": False, + "a_record_ref": "record:a/xyz", + "fixed_address_ref": "fixedaddress/xyz", + "fixed_address_type": "MAC_ADDRESS", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + + infoblox_adapter.conn.create_ptr_record.assert_called_once() + infoblox_adapter.conn.create_ptr_record.assert_called_with( + fqdn="server2.local.test.net", ip_address="10.0.0.1", network_view="default" + ) + infoblox_adapter.conn.create_host_record.assert_not_called() + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() + infoblox_adapter.conn.update_fixed_address.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_update_a_and_ptr_record(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure A and PTR records are updated.""" + nb_ipaddress_atrs = { + "dns_name": "server2.local.test.net", + "has_a_record": True, + "has_ptr_record": True, + "has_fixed_address": False, + } + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) infoblox_adapter.job = Mock() - job_logger = Mock() - infoblox_adapter.job.logger = job_logger inf_ds_namespace = infoblox_adapter.namespace( name="Global", ext_attrs={}, @@ -1208,21 +1445,860 @@ def test_ip_address_update_fail_host_and_ptr_record(self, mock_tag_involved_obje infoblox_adapter.add(inf_ds_namespace) inf_ipaddress_atrs = { "dns_name": "server1.local.test.net", + "has_a_record": True, "has_ptr_record": True, + "a_record_ref": "record:a/xyz", "ptr_record_ref": "record:ptr/xyz", + "fixed_address_ref": "fixedaddress/xyz", + "fixed_address_type": "MAC_ADDRESS", } inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) infoblox_adapter.add(inf_ds_ipaddress) self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_called_once() + infoblox_adapter.conn.update_ptr_record.assert_called_with( + ref="record:ptr/xyz", data={"ptrdname": "server2.local.test.net"} + ) + infoblox_adapter.conn.update_a_record.assert_called_once() + infoblox_adapter.conn.update_a_record.assert_called_with( + ref="record:a/xyz", data={"name": "server2.local.test.net"} + ) + infoblox_adapter.conn.create_host_record.assert_not_called() 
infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.update_ptr_record.assert_not_called() + infoblox_adapter.conn.update_fixed_address.assert_not_called() + mock_tag_involved_objects.assert_called_once() mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + ) - log_msg = "Cannot update Host Record for IP Address, 10.0.0.1. It already has an existing PTR Record." + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_update_fail_a_and_host_record(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure update fails if an A record is marked for update but Infoblox already has a Host record.""" + nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_a_record": True} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + job_logger = Mock() + infoblox_adapter.job.logger = job_logger + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "has_host_record": True, + "host_record_ref": "record:host/xyz", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.create_host_record.assert_not_called() + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() + infoblox_adapter.conn.update_fixed_address.assert_not_called() + + log_msg = "Cannot update A Record for IP Address, 10.0.0.1. It already has an existing Host Record." 
job_logger.warning.assert_called_with(log_msg) + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_update_fail_ptr_and_host_record(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure update fails if PTR record is marked for update but Infoblox already has a Host record.""" + nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_ptr_record": True} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + job_logger = Mock() + infoblox_adapter.job.logger = job_logger + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "has_host_record": True, + "host_record_ref": "record:host/xyz", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.create_host_record.assert_not_called() + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() + infoblox_adapter.conn.update_fixed_address.assert_not_called() + + log_msg = ( + "Cannot create/update PTR Record for IP Address, 10.0.0.1. It already has an existing Host Record." 
+ ) + job_logger.warning.assert_called_with(log_msg) + + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_update_fail_host_and_a_record(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure update fails if Host record is marked for update but Infoblox already has an A record.""" + nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_host_record": True} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + job_logger = Mock() + infoblox_adapter.job.logger = job_logger + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "has_a_record": True, + "a_record_ref": "record:a/xyz", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.create_host_record.assert_not_called() + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() + infoblox_adapter.conn.update_fixed_address.assert_not_called() + + log_msg = "Cannot update Host Record for IP Address, 10.0.0.1. It already has an existing A Record." 
+ job_logger.warning.assert_called_with(log_msg) + + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_update_fail_host_and_ptr_record(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure update fails if Host record is marked for update but Infoblox already has a PTR record.""" + nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_host_record": True} + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + job_logger = Mock() + infoblox_adapter.job.logger = job_logger + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "has_ptr_record": True, + "ptr_record_ref": "record:ptr/xyz", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.create_host_record.assert_not_called() + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() + infoblox_adapter.conn.update_fixed_address.assert_not_called() + mock_validate_dns_name.assert_called_once() + + log_msg = "Cannot update Host Record for IP Address, 10.0.0.1. It already has an existing PTR Record." 
+            job_logger.warning.assert_called_with(log_msg)
+
+            mock_tag_involved_objects.assert_called_once()
+            mock_validate_dns_name.assert_called_once()
+            mock_validate_dns_name.assert_called_with(
+                infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default"
+            )
+
+    @unittest.mock.patch(
+        "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name",
+        autospec=True,
+        return_value=True,
+    )
+    @unittest.mock.patch(
+        "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects",
+        autospec=True,
+    )
+    def test_ip_address_update_no_dns_updates(self, mock_tag_involved_objects, mock_validate_dns_name):
+        """Ensure DNS update/create is not triggered if the user configures DONT_CREATE_RECORD for dns_record_type."""
+        nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_a_record": True, "has_ptr_record": True}
+        nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs))
+        self.nb_adapter.add(nb_ds_ipaddress)
+        self.nb_adapter.load()
+
+        with unittest.mock.patch(
+            "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True
+        ) as mock_client:
+            self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD
+            self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD
+            infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config)
+            infoblox_adapter.job = Mock()
+            job_logger = Mock()
+            infoblox_adapter.job.logger = job_logger
+            inf_ds_namespace = infoblox_adapter.namespace(
+                name="Global",
+                ext_attrs={},
+            )
+            infoblox_adapter.add(inf_ds_namespace)
+            inf_ipaddress_atrs = {
+                "dns_name": "server1.local.test.net",
+                "has_a_record": True,
+                "has_ptr_record": True,
+                "a_record_ref": "record:a/xyz",
+                "ptr_record_ref": "record:ptr/xyz",
+            }
+            inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs))
+            infoblox_adapter.add(inf_ds_ipaddress)
+            self.nb_adapter.sync_to(infoblox_adapter)
+            infoblox_adapter.conn.create_host_record.assert_not_called()
+            infoblox_adapter.conn.update_host_record.assert_not_called()
+            infoblox_adapter.conn.create_a_record.assert_not_called()
+            infoblox_adapter.conn.update_a_record.assert_not_called()
+            infoblox_adapter.conn.create_ptr_record.assert_not_called()
+            infoblox_adapter.conn.update_ptr_record.assert_not_called()
+            infoblox_adapter.conn.update_fixed_address.assert_not_called()
+
+            mock_tag_involved_objects.assert_called_once()
+            mock_validate_dns_name.assert_not_called()
+
+    ##############
+    # Update Fixed Address and Update/Create DNS Record
+    ##############
+
+    @unittest.mock.patch(
+        "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name",
+        autospec=True,
+        return_value=True,
+    )
+    @unittest.mock.patch(
+        "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects",
+        autospec=True,
+    )
+    def test_ip_address_update_fixed_address_reservation_and_host_record(
+        self, mock_tag_involved_objects, mock_validate_dns_name
+    ):
+        """Ensure Fixed Address RESERVED and Host records are updated together."""
+        nb_ipaddress_atrs = {
+            "dns_name": "server2.local.test.net",
+            "description": "new description",
+            "has_fixed_address": True,
+            "has_host_record": True,
+        }
+        nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs))
+        self.nb_adapter.add(nb_ds_ipaddress)
+        self.nb_adapter.load()
+
+        with unittest.mock.patch(
+            "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi",
autospec=True + ) as mock_client: + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "has_host_record": True, + "has_fixed_address": True, + "host_record_ref": "record:host/xyz", + "fixed_address_ref": "fixedaddress/xyz", + "fixed_address_type": "RESERVED", + "description": "old description", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + + infoblox_adapter.conn.update_host_record.assert_called_once() + infoblox_adapter.conn.update_host_record.assert_called_with( + ref="record:host/xyz", data={"comment": "new description", "name": "server2.local.test.net"} + ) + infoblox_adapter.conn.update_fixed_address.assert_called_once() + infoblox_adapter.conn.update_fixed_address.assert_called_with( + ref="fixedaddress/xyz", data={"comment": "new description", "name": "server2.local.test.net"} + ) + infoblox_adapter.conn.create_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_update_fixed_address_reservation_and_a_and_ptr_records( + self, mock_tag_involved_objects, mock_validate_dns_name + ): + """Ensure Fixed Address RESERVED and A+PTR records are updated together.""" + nb_ipaddress_atrs = { + "dns_name": "server2.local.test.net", + "description": "new description", + "has_fixed_address": True, + "has_a_record": True, + "has_ptr_record": True, + } + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "has_a_record": True, + "has_ptr_record": True, + "has_fixed_address": True, + "a_record_ref": "record:a/xyz", + "ptr_record_ref": "record:ptr/xyz", + "fixed_address_ref": "fixedaddress/xyz", + "fixed_address_type": "RESERVED", + "description": "old description", + } + 
inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + + infoblox_adapter.conn.update_a_record.assert_called_once() + infoblox_adapter.conn.update_a_record.assert_called_with( + ref="record:a/xyz", data={"comment": "new description", "name": "server2.local.test.net"} + ) + infoblox_adapter.conn.update_ptr_record.assert_called_once() + infoblox_adapter.conn.update_ptr_record.assert_called_with( + ref="record:ptr/xyz", data={"comment": "new description", "ptrdname": "server2.local.test.net"} + ) + infoblox_adapter.conn.update_fixed_address.assert_called_once() + infoblox_adapter.conn.update_fixed_address.assert_called_with( + ref="fixedaddress/xyz", data={"comment": "new description", "name": "server2.local.test.net"} + ) + infoblox_adapter.conn.create_host_record.assert_not_called() + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_update_fixed_address_mac_and_host_record( + self, mock_tag_involved_objects, mock_validate_dns_name + ): + """Ensure Fixed Address MAC and Host records are updated together.""" + nb_ipaddress_atrs = { + "dns_name": "server2.local.test.net", + "description": "new description", + "has_fixed_address": True, + "has_host_record": True, + } + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS + self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "has_host_record": True, + "has_fixed_address": True, + "host_record_ref": "record:host/xyz", + "fixed_address_ref": "fixedaddress/xyz", + "fixed_address_type": "MAC_ADDRESS", + "description": "old description", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + + infoblox_adapter.conn.update_host_record.assert_called_once() + infoblox_adapter.conn.update_host_record.assert_called_with( + ref="record:host/xyz", data={"comment": "new description", "name": "server2.local.test.net"} + ) + infoblox_adapter.conn.update_fixed_address.assert_called_once() + infoblox_adapter.conn.update_fixed_address.assert_called_with( + ref="fixedaddress/xyz", data={"comment": "new description", "name": 
"server2.local.test.net"} + ) + infoblox_adapter.conn.create_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_update_fixed_address_mac_and_a_and_ptr_records( + self, mock_tag_involved_objects, mock_validate_dns_name + ): + """Ensure Fixed Address MAC and A+PTR records are updated together.""" + nb_ipaddress_atrs = { + "dns_name": "server2.local.test.net", + "description": "new description", + "has_fixed_address": True, + "has_a_record": True, + "has_ptr_record": True, + } + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS + self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "has_a_record": True, + "has_ptr_record": True, + "has_fixed_address": True, + "a_record_ref": "record:a/xyz", + "ptr_record_ref": "record:ptr/xyz", + "fixed_address_ref": "fixedaddress/xyz", + "fixed_address_type": "MAC_ADDRESS", + "description": "old description", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + + infoblox_adapter.conn.update_a_record.assert_called_once() + infoblox_adapter.conn.update_a_record.assert_called_with( + ref="record:a/xyz", data={"comment": "new description", "name": "server2.local.test.net"} + ) + infoblox_adapter.conn.update_ptr_record.assert_called_once() + infoblox_adapter.conn.update_ptr_record.assert_called_with( + ref="record:ptr/xyz", data={"comment": "new description", "ptrdname": "server2.local.test.net"} + ) + infoblox_adapter.conn.update_fixed_address.assert_called_once() + infoblox_adapter.conn.update_fixed_address.assert_called_with( + ref="fixedaddress/xyz", data={"comment": "new description", "name": "server2.local.test.net"} + ) + infoblox_adapter.conn.create_host_record.assert_not_called() + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, 
dns_name="server2.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_update_fixed_address_reservation_and_create_host_record( + self, mock_tag_involved_objects, mock_validate_dns_name + ): + """Ensure Fixed Address RESERVED is updated and Host record is created.""" + nb_ipaddress_atrs = { + "dns_name": "server2.local.test.net", + "description": "new description", + "has_fixed_address": True, + "has_host_record": True, + } + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "has_host_record": False, + "has_fixed_address": True, + "fixed_address_ref": "fixedaddress/xyz", + "fixed_address_type": "RESERVED", + "description": "old description", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + + infoblox_adapter.conn.create_host_record.assert_called_once() + infoblox_adapter.conn.create_host_record.assert_called_with( + fqdn="server2.local.test.net", ip_address="10.0.0.1", network_view="default" + ) + infoblox_adapter.conn.update_fixed_address.assert_called_once() + infoblox_adapter.conn.update_fixed_address.assert_called_with( + ref="fixedaddress/xyz", data={"comment": "new description", "name": "server2.local.test.net"} + ) + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_update_fixed_address_reservation_and_create_a_and_ptr_records( + self, mock_tag_involved_objects, mock_validate_dns_name + ): + """Ensure Fixed Address RESERVED is updated and A+PTR records are created.""" + nb_ipaddress_atrs = { + "dns_name": "server2.local.test.net", + "description": "new description", + "has_fixed_address": True, + "has_a_record": True, + "has_ptr_record": True, + } + nb_ds_ipaddress = 
self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "has_a_record": False, + "has_ptr_record": False, + "has_fixed_address": True, + "fixed_address_ref": "fixedaddress/xyz", + "fixed_address_type": "RESERVED", + "description": "old description", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + + infoblox_adapter.conn.create_a_record.assert_called_once() + infoblox_adapter.conn.create_a_record.assert_called_with( + fqdn="server2.local.test.net", ip_address="10.0.0.1", network_view="default" + ) + infoblox_adapter.conn.create_ptr_record.assert_called_once() + infoblox_adapter.conn.create_ptr_record.assert_called_with( + fqdn="server2.local.test.net", ip_address="10.0.0.1", network_view="default" + ) + infoblox_adapter.conn.update_fixed_address.assert_called_once() + infoblox_adapter.conn.update_fixed_address.assert_called_with( + ref="fixedaddress/xyz", data={"comment": "new description", "name": "server2.local.test.net"} + ) + infoblox_adapter.conn.create_host_record.assert_not_called() + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_update_mac_address_reservation_and_create_host_record( + self, mock_tag_involved_objects, mock_validate_dns_name + ): + """Ensure Fixed Address MAC is updated and Host record is created.""" + nb_ipaddress_atrs = { + "dns_name": "server2.local.test.net", + "description": "new description", + "has_fixed_address": True, + "has_host_record": True, + } + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS + self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + 
"dns_name": "server1.local.test.net", + "has_host_record": False, + "has_fixed_address": True, + "fixed_address_ref": "fixedaddress/xyz", + "fixed_address_type": "MAC_ADDRESS", + "description": "old description", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + + infoblox_adapter.conn.create_host_record.assert_called_once() + infoblox_adapter.conn.create_host_record.assert_called_with( + fqdn="server2.local.test.net", ip_address="10.0.0.1", network_view="default" + ) + infoblox_adapter.conn.update_fixed_address.assert_called_once() + infoblox_adapter.conn.update_fixed_address.assert_called_with( + ref="fixedaddress/xyz", data={"comment": "new description", "name": "server2.local.test.net"} + ) + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + ) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_update_fixed_address_mac_and_create_a_and_ptr_records( + self, mock_tag_involved_objects, mock_validate_dns_name + ): + """Ensure Fixed Address MAC is updated and A+PTR records are created.""" + nb_ipaddress_atrs = { + "dns_name": "server2.local.test.net", + "description": "new description", + "has_fixed_address": True, + "has_a_record": True, + "has_ptr_record": True, + } + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS + self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "has_a_record": False, + "has_ptr_record": False, + "has_fixed_address": True, + "fixed_address_ref": "fixedaddress/xyz", + "fixed_address_type": "MAC_ADDRESS", + "description": "old description", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + + infoblox_adapter.conn.create_a_record.assert_called_once() + infoblox_adapter.conn.create_a_record.assert_called_with( + fqdn="server2.local.test.net", ip_address="10.0.0.1", network_view="default" + ) + infoblox_adapter.conn.create_ptr_record.assert_called_once() + infoblox_adapter.conn.create_ptr_record.assert_called_with( + fqdn="server2.local.test.net", 
ip_address="10.0.0.1", network_view="default" + ) + infoblox_adapter.conn.update_fixed_address.assert_called_once() + infoblox_adapter.conn.update_fixed_address.assert_called_with( + ref="fixedaddress/xyz", data={"comment": "new description", "name": "server2.local.test.net"} + ) + infoblox_adapter.conn.create_host_record.assert_not_called() + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() mock_tag_involved_objects.assert_called_once() mock_validate_dns_name.assert_called_once() mock_validate_dns_name.assert_called_with( From c44b2d4d21197e300e97c7341dd8c03daf3fecb0 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Wed, 5 Jun 2024 14:26:49 +0100 Subject: [PATCH 097/229] Update model tests. --- nautobot_ssot/tests/infoblox/test_models.py | 93 ++------------------- 1 file changed, 9 insertions(+), 84 deletions(-) diff --git a/nautobot_ssot/tests/infoblox/test_models.py b/nautobot_ssot/tests/infoblox/test_models.py index bf62ad911..4745004ad 100644 --- a/nautobot_ssot/tests/infoblox/test_models.py +++ b/nautobot_ssot/tests/infoblox/test_models.py @@ -12,7 +12,7 @@ ) from nautobot.extras.models import ExternalIntegration, Secret, SecretsGroup, SecretsGroupAssociation, Status - +from nautobot_ssot.integrations.infoblox.choices import DNSRecordTypeChoices, FixedAddressTypeChoices from nautobot_ssot.integrations.infoblox.models import SSOTInfobloxConfig @@ -82,9 +82,8 @@ def setUp(self): "infoblox_sync_filters": sync_filters, "infoblox_dns_view_mapping": {"default": "default.default"}, "cf_fields_ignore": {"extensible_attributes": [], "custom_fields": []}, - "create_a_record": False, - "create_host_record": True, - "create_ptr_record": False, + "fixed_address_type": FixedAddressTypeChoices.DONT_CREATE_RECORD, + "dns_record_type": DNSRecordTypeChoices.HOST_RECORD, } def test_create_infoblox_config_required_fields_only(self): @@ -113,9 +112,8 @@ def test_create_infoblox_config_required_fields_only(self): self.assertEqual(inf_cfg_db.cf_fields_ignore, {"custom_fields": [], "extensible_attributes": []}) self.assertEqual(inf_cfg_db.import_ipv4, True) self.assertEqual(inf_cfg_db.import_ipv6, False) - self.assertEqual(inf_cfg_db.create_host_record, True) - self.assertEqual(inf_cfg_db.create_a_record, False) - self.assertEqual(inf_cfg_db.create_ptr_record, False) + self.assertEqual(inf_cfg_db.fixed_address_type, FixedAddressTypeChoices.DONT_CREATE_RECORD) + self.assertEqual(inf_cfg_db.dns_record_type, DNSRecordTypeChoices.HOST_RECORD) self.assertEqual(inf_cfg_db.job_enabled, False) def test_create_infoblox_config_all_fields(self): @@ -136,9 +134,8 @@ def test_create_infoblox_config_all_fields(self): infoblox_sync_filters=[{"network_view": "dev"}], infoblox_dns_view_mapping={"default": "default.default"}, cf_fields_ignore={"extensible_attributes": ["aws_id"], "custom_fields": ["po_no"]}, - create_a_record=True, - create_host_record=False, - create_ptr_record=True, + fixed_address_type=FixedAddressTypeChoices.MAC_ADDRESS, + dns_record_type=DNSRecordTypeChoices.A_RECORD, ) inf_cfg.validated_save() @@ -159,9 +156,8 @@ def test_create_infoblox_config_all_fields(self): self.assertEqual(inf_cfg_db.cf_fields_ignore, {"extensible_attributes": ["aws_id"], "custom_fields": ["po_no"]}) self.assertEqual(inf_cfg_db.import_ipv4, False) self.assertEqual(inf_cfg_db.import_ipv6, True) - self.assertEqual(inf_cfg_db.create_host_record, False) - self.assertEqual(inf_cfg_db.create_a_record, 
True) - self.assertEqual(inf_cfg_db.create_ptr_record, True) + self.assertEqual(inf_cfg_db.fixed_address_type, FixedAddressTypeChoices.MAC_ADDRESS) + self.assertEqual(inf_cfg_db.dns_record_type, DNSRecordTypeChoices.A_RECORD) self.assertEqual(inf_cfg_db.job_enabled, True) def test_infoblox_sync_filters_must_be_a_list(self): @@ -372,77 +368,6 @@ def test_infoblox_import_ip_at_least_one_chosen(self): "At least one of `import_ipv4` or `import_ipv6` must be set to True.", ) - def test_infoblox_incompatible_ip_address_create_options(self): - """Only one of `create_a_record` or `create_host_record` can be enabled at any given time. - `create_ptr` cannot be used with `create_host_record`. - """ - inf_dict = deepcopy(self.infoblox_config_dict) - inf_dict["create_a_record"] = True - inf_dict["create_host_record"] = True - infoblox_config = SSOTInfobloxConfig(**inf_dict) - with self.assertRaises(ValidationError) as failure_exception: - infoblox_config.full_clean() - self.assertIn("create_a_record", failure_exception.exception.error_dict) - self.assertIn("create_host_record", failure_exception.exception.error_dict) - self.assertEqual( - failure_exception.exception.error_dict["create_a_record"][0].message, - "Only one of `create_a_record` or `create_host_record` can be enabled at the same time.", - ) - self.assertEqual( - failure_exception.exception.error_dict["create_host_record"][0].message, - "Only one of `create_a_record` or `create_host_record` can be enabled at the same time.", - ) - - inf_dict["create_a_record"] = False - inf_dict["create_ptr_record"] = True - inf_dict["create_host_record"] = True - infoblox_config = SSOTInfobloxConfig(**inf_dict) - with self.assertRaises(ValidationError) as failure_exception: - infoblox_config.full_clean() - self.assertIn("create_host_record", failure_exception.exception.error_dict) - self.assertIn("create_ptr_record", failure_exception.exception.error_dict) - self.assertEqual( - failure_exception.exception.error_dict["create_host_record"][0].message, - "`create_ptr_record` can be used with `create_a_record` only.", - ) - self.assertEqual( - failure_exception.exception.error_dict["create_ptr_record"][0].message, - "`create_ptr_record` can be used with `create_a_record` only.", - ) - - def test_infoblox_ptr_record_requires_a_record(self): - """Using `create_ptr_record` required `create_a_record` to be enabled.""" - inf_dict = deepcopy(self.infoblox_config_dict) - inf_dict["create_host_record"] = False - inf_dict["create_a_record"] = False - inf_dict["create_ptr_record"] = True - infoblox_config = SSOTInfobloxConfig(**inf_dict) - with self.assertRaises(ValidationError) as failure_exception: - infoblox_config.full_clean() - self.assertIn("create_ptr_record", failure_exception.exception.error_dict) - self.assertEqual( - failure_exception.exception.messages[0], "To use `create_ptr_record` you must enable `create_a_record`." 
- ) - - def test_infoblox_at_least_one_of_a_or_host_record_required(self): - """At least one of `create_a_record` or `create_host_record` must be selected.""" - inf_dict = deepcopy(self.infoblox_config_dict) - inf_dict["create_a_record"] = False - inf_dict["create_host_record"] = False - infoblox_config = SSOTInfobloxConfig(**inf_dict) - with self.assertRaises(ValidationError) as failure_exception: - infoblox_config.full_clean() - self.assertIn("create_a_record", failure_exception.exception.error_dict) - self.assertIn("create_host_record", failure_exception.exception.error_dict) - self.assertEqual( - failure_exception.exception.error_dict["create_a_record"][0].message, - "Either `create_a_record` or `create_host_record` must be enabled.", - ) - self.assertEqual( - failure_exception.exception.error_dict["create_host_record"][0].message, - "Either `create_a_record` or `create_host_record` must be enabled.", - ) - def test_infoblox_infoblox_dns_view_mapping_must_be_dict(self): """Value of `infoblox_dns_view_mapping` key must be a dict.""" inf_dict = deepcopy(self.infoblox_config_dict) From b6f4a4ba6c0dbad8abfa83c69cb954d22b93eb44 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Wed, 5 Jun 2024 14:27:18 +0100 Subject: [PATCH 098/229] Mock job object. --- nautobot_ssot/tests/infoblox/test_nautobot_adapter.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py b/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py index 650e89183..5051c2b38 100644 --- a/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py +++ b/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py @@ -1,5 +1,7 @@ """Nautobot Adapter tests.""" +from unittest import mock + from django.contrib.contenttypes.models import ContentType from django.test import TestCase @@ -156,6 +158,7 @@ def setUp(self): self.config = create_default_infoblox_config() self.sync_filters = self.config.infoblox_sync_filters self.nb_adapter = NautobotAdapter(config=self.config) + self.nb_adapter.job = mock.Mock() def test_load_vlans_loads_expected_vlans(self): self.nb_adapter.load_vlans() From 30135b8ffa30c320cddb673e89d0c6eed4237a7d Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Wed, 5 Jun 2024 14:27:59 +0100 Subject: [PATCH 099/229] Additional utils tests. 
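Covers validate_dns_name, map_network_view_to_namespace, get_valid_custom_fields,
and get_default_custom_fields, including excluded attribute and custom field handling.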
--- nautobot_ssot/tests/infoblox/test_utils.py | 116 ++++++++++++++++++++- 1 file changed, 114 insertions(+), 2 deletions(-) diff --git a/nautobot_ssot/tests/infoblox/test_utils.py b/nautobot_ssot/tests/infoblox/test_utils.py index fde91382e..27f9b2768 100644 --- a/nautobot_ssot/tests/infoblox/test_utils.py +++ b/nautobot_ssot/tests/infoblox/test_utils.py @@ -1,16 +1,20 @@ """Util tests that do not require Django.""" import unittest +import unittest.mock from django.test import TestCase - from nautobot.extras.models import Status from nautobot.ipam.models import VLAN, VLANGroup from nautobot_ssot.integrations.infoblox.utils.diffsync import ( + get_default_custom_fields, + get_ext_attr_dict, + get_valid_custom_fields, get_vlan_view_name, + map_network_view_to_namespace, nautobot_vlan_status, - get_ext_attr_dict, + validate_dns_name, ) from nautobot_ssot.integrations.infoblox.utils.nautobot import build_vlan_map_from_relations @@ -37,6 +41,114 @@ def test_get_ext_attr_dict(self): standardized_dict = get_ext_attr_dict(test_dict) self.assertEqual(standardized_dict, expected) + def test_get_ext_attr_dict_slugify(self): + """Test get_ext_attr_dict slugifies keys.""" + test_dict = {"Site-Loc": {"value": "NTC"}, "Region": {"value": "Central"}} + expected = {"site_loc": "NTC", "region": "Central"} + standardized_dict = get_ext_attr_dict(test_dict) + self.assertEqual(standardized_dict, expected) + + def test_get_ext_attr_dict_exclusion_list(self): + """Test get_ext_attr_dict correctly excludes attributes.""" + test_dict = {"Site": {"value": "HQ"}, "Region": {"value": "Central"}, "Tenant": {"value": "NTC"}} + excluded_attrs = ["Tenant"] + expected = {"site": "HQ", "region": "Central"} + standardized_dict = get_ext_attr_dict(extattrs=test_dict, excluded_attrs=excluded_attrs) + self.assertEqual(standardized_dict, expected) + + def test_validate_dns_name(self): + """Test validate_dns_name.""" + client = unittest.mock.Mock() + client.get_dns_view_for_network_view = unittest.mock.Mock(return_value="default.dev") + client.get_authoritative_zones_for_dns_view = unittest.mock.Mock( + return_value=[ + { + "fqdn": "nautobot.local.dev", + }, + { + "fqdn": "nautobot.local.test", + }, + ] + ) + + valid_name = "server1.nautobot.local.dev" + invalid_name = "server1.nautobot.local.prod" + + self.assertEqual(False, validate_dns_name(client, invalid_name, "dev")) + self.assertEqual(True, validate_dns_name(client, valid_name, "dev")) + + def test_map_network_view_to_namespace(self): + """Test map_network_view_to_namespace.""" + network_view1 = "dev" + network_view2 = "default" + + namespace1 = "test" + namespace2 = "Global" + + self.assertEqual("dev", map_network_view_to_namespace(value=network_view1, direction="nv_to_ns")) + self.assertEqual("Global", map_network_view_to_namespace(value=network_view2, direction="nv_to_ns")) + self.assertEqual("test", map_network_view_to_namespace(value=namespace1, direction="ns_to_nv")) + self.assertEqual("default", map_network_view_to_namespace(value=namespace2, direction="ns_to_nv")) + + def test_get_valid_custom_fields(self): + """Test get_valid_custom_fields.""" + excluded_cfs = ["synced_to_snow"] + + cfs1 = {"ssot_synced_to_infoblox": True, "dhcp_ranges": [], "mac_address": "", "vlan": 100} + cfs2 = {"tenant": "NTC", "synced_to_snow": True} + + expected1 = {"vlan": 100} + expected2 = {"tenant": "NTC"} + + self.assertEqual(expected1, get_valid_custom_fields(cfs=cfs1)) + self.assertEqual(expected2, get_valid_custom_fields(cfs=cfs2, excluded_cfs=excluded_cfs)) + + 
@unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.diffsync.CustomField", + autospec=True, + ) + def test_get_default_custom_fields(self, custom_field): + """Test get_default_custom_fields.""" + content_type = unittest.mock.Mock() + cf1 = unittest.mock.Mock() + cf2 = unittest.mock.Mock() + cf_def_excl1 = unittest.mock.Mock() + cf_def_excl2 = unittest.mock.Mock() + cf1.key = "tenant" + cf2.key = "site" + cf_def_excl1.key = "ssot_synced_to_infoblox" + cf_def_excl2.key = "dhcp_ranges" + + custom_field.objects.filter.return_value = [cf1, cf2, cf_def_excl1, cf_def_excl2] + + expected = {"tenant": None, "site": None} + + result = get_default_custom_fields(cf_contenttype=content_type) + self.assertEqual(expected, result) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.diffsync.CustomField", + autospec=True, + ) + def test_get_default_custom_fields_excluded(self, custom_field): + """Test get_default_custom_fields with excluded cfs.""" + content_type = unittest.mock.Mock() + cf1 = unittest.mock.Mock() + cf2 = unittest.mock.Mock() + cf3 = unittest.mock.Mock() + cf4 = unittest.mock.Mock() + cf1.key = "tenant" + cf2.key = "site" + cf3.key = "snow_synced" + cf4.key = "vlan" + excluded_cfs = ["snow_synced", "vlan"] + custom_field.objects.filter.return_value = [cf1, cf2, cf3, cf4] + + expected = {"tenant": None, "site": None} + + result = get_default_custom_fields(cf_contenttype=content_type, excluded_cfs=excluded_cfs) + self.assertEqual(expected, result) + class TestNautobotUtils(TestCase): """Test infoblox.utils.nautobot.py.""" From 9660ccf85304cb1e840c36303700813c725f1c02 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Fri, 7 Jun 2024 20:44:55 +0100 Subject: [PATCH 100/229] Update infoblox diffsync model tests. --- .../tests/infoblox/test_infoblox_models.py | 626 ++++++++++++------ 1 file changed, 432 insertions(+), 194 deletions(-) diff --git a/nautobot_ssot/tests/infoblox/test_infoblox_models.py b/nautobot_ssot/tests/infoblox/test_infoblox_models.py index 347c32fd2..4b8dc01d5 100644 --- a/nautobot_ssot/tests/infoblox/test_infoblox_models.py +++ b/nautobot_ssot/tests/infoblox/test_infoblox_models.py @@ -13,23 +13,137 @@ def _get_ip_address_dict(attrs): - ipaddress_dict = dict( # pylint: disable=use-dict-literal - description="Test IPAddress", - address="10.0.0.1", - status="Active", - prefix="10.0.0.0/8", - prefix_length=8, - ip_addr_type="host", - namespace="Global", - dns_name="", - ) + """Build dict used for creating diffsync IP address.""" + ipaddress_dict = { + "description": "Test IPAddress", + "address": "10.0.0.1", + "status": "Active", + "prefix": "10.0.0.0/8", + "prefix_length": 8, + "ip_addr_type": "host", + "namespace": "Global", + "dns_name": "", + } ipaddress_dict.update(attrs) return ipaddress_dict +def _get_network_dict(attrs): + """Build dict used for creating diffsync network.""" + network_dict = { + "network": "10.0.0.0/8", + "description": "TestNetwork", + "namespace": "Global", + "status": "Active", + } + network_dict.update(attrs) + + return network_dict + + +class TestModelInfobloxNetwork(TestCase): + """Tests correct network record is created.""" + + def setUp(self): + "Test class set up." 
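+ # Shared fixtures: a default Infoblox config feeding a Nautobot adapter whose job is mocked.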
+ self.config = create_default_infoblox_config() + self.nb_adapter = NautobotAdapter(config=self.config) + self.nb_adapter.job = Mock() + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_network_create_network(self, mock_tag_involved_objects): + """Validate network gets created.""" + nb_network_atrs = {"network_type": "network"} + nb_ds_network = self.nb_adapter.prefix(**_get_network_dict(nb_network_atrs)) + self.nb_adapter.add(nb_ds_network) + self.nb_adapter.load() + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + infoblox_adapter.job = Mock() + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.create_network.assert_called_once() + infoblox_adapter.conn.create_network.assert_called_with( + prefix="10.0.0.0/8", comment="TestNetwork", network_view="default" + ) + infoblox_adapter.conn.create_network_container.assert_not_called() + mock_tag_involved_objects.assert_called_once() + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_network_create_network_container(self, mock_tag_involved_objects): + """Validate network container gets created.""" + nb_network_atrs = {"network_type": "container"} + nb_ds_network = self.nb_adapter.prefix(**_get_network_dict(nb_network_atrs)) + self.nb_adapter.add(nb_ds_network) + self.nb_adapter.load() + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + infoblox_adapter.job = Mock() + self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.create_network_container.assert_called_once() + infoblox_adapter.conn.create_network_container.assert_called_with( + prefix="10.0.0.0/8", comment="TestNetwork", network_view="default" + ) + infoblox_adapter.conn.create_network.assert_not_called() + mock_tag_involved_objects.assert_called_once() + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_network_update_network(self, mock_tag_involved_objects): + """Validate network gets updated.""" + nb_network_atrs = { + "description": "New Description", + } + nb_ds_network = self.nb_adapter.prefix(**_get_network_dict(nb_network_atrs)) + self.nb_adapter.add(nb_ds_network) + self.nb_adapter.load() + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_network_atrs = { + "description": "Old Description", + } + inf_ds_network = infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs)) + infoblox_adapter.add(inf_ds_network) + infoblox_adapter.job = Mock() + self.nb_adapter.sync_to(infoblox_adapter) + 
infoblox_adapter.conn.update_network.assert_called_once() + infoblox_adapter.conn.update_network.assert_called_with( + prefix="10.0.0.0/8", comment="New Description", network_view="default" + ) + mock_tag_involved_objects.assert_called_once() + + class TestModelInfobloxIPAddressCreate(TestCase): - """Tests correct DNS record is created.""" + """Tests correct Fixed Address and DNS record are created.""" def setUp(self): "Test class set up." @@ -102,7 +216,7 @@ def test_ip_address_create_a_record(self, mock_tag_involved_objects, mock_valida self.nb_adapter.sync_to(infoblox_adapter) infoblox_adapter.conn.create_a_record.assert_called_once() infoblox_adapter.conn.create_a_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" ) infoblox_adapter.conn.create_ptr_record.assert_not_called() infoblox_adapter.conn.create_host_record.assert_not_called() @@ -144,11 +258,11 @@ def test_ip_address_create_a_and_ptr_record(self, mock_tag_involved_objects, moc self.nb_adapter.sync_to(infoblox_adapter) infoblox_adapter.conn.create_a_record.assert_called_once() infoblox_adapter.conn.create_a_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" ) infoblox_adapter.conn.create_ptr_record.assert_called_once() infoblox_adapter.conn.create_ptr_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" ) infoblox_adapter.conn.create_host_record.assert_not_called() mock_tag_involved_objects.assert_called_once() @@ -191,7 +305,7 @@ def test_ip_address_create_host_record(self, mock_tag_involved_objects, mock_val infoblox_adapter.conn.create_ptr_record.assert_not_called() infoblox_adapter.conn.create_host_record.assert_called_once() infoblox_adapter.conn.create_host_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" ) mock_tag_involved_objects.assert_called_once() mock_validate_dns_name.assert_called_once() @@ -291,7 +405,11 @@ def test_ip_address_create_invalid_dns_name(self, mock_tag_involved_objects, moc ) def test_ip_address_create_fixed_address_reserved(self, mock_tag_involved_objects, mock_validate_dns_name): """Validate Fixed Address type RESERVED is created.""" - nb_ipaddress_atrs = {"dns_name": "server1.local.test.net"} + nb_ipaddress_atrs = { + "fixed_address_name": "FixedAddresReserved", + "fixed_address_comment": "Fixed Address Reservation", + "has_fixed_address": True, + } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) self.nb_adapter.load() @@ -310,7 +428,11 @@ def test_ip_address_create_fixed_address_reserved(self, mock_tag_involved_object self.nb_adapter.sync_to(infoblox_adapter) infoblox_adapter.conn.create_fixed_address.assert_called_once() infoblox_adapter.conn.create_fixed_address.assert_called_with( - ip_address="10.0.0.1", name="server1.local.test.net", match_client="RESERVED", network_view="default" + ip_address="10.0.0.1", + name="FixedAddresReserved", + comment="Fixed Address Reservation", + 
match_client="RESERVED", + network_view="default", ) infoblox_adapter.conn.create_a_record.assert_not_called() infoblox_adapter.conn.create_ptr_record.assert_not_called() @@ -327,11 +449,11 @@ def test_ip_address_create_fixed_address_reserved(self, mock_tag_involved_object "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_create_fixed_address_reserved_no_dns_name( - self, mock_tag_involved_objects, mock_validate_dns_name - ): - """Validate Fixed Address type RESERVED is created with description used for name.""" - nb_ipaddress_atrs = {"dns_name": "", "description": "server1"} + def test_ip_address_create_fixed_address_reserved_no_name(self, mock_tag_involved_objects, mock_validate_dns_name): + """Validate Fixed Address type RESERVED is created with empty name.""" + nb_ipaddress_atrs = { + "has_fixed_address": True, + } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) self.nb_adapter.load() @@ -350,7 +472,11 @@ def test_ip_address_create_fixed_address_reserved_no_dns_name( self.nb_adapter.sync_to(infoblox_adapter) infoblox_adapter.conn.create_fixed_address.assert_called_once() infoblox_adapter.conn.create_fixed_address.assert_called_with( - ip_address="10.0.0.1", name="server1", match_client="RESERVED", network_view="default" + ip_address="10.0.0.1", + name="", + comment="", + match_client="RESERVED", + network_view="default", ) infoblox_adapter.conn.create_a_record.assert_not_called() infoblox_adapter.conn.create_ptr_record.assert_not_called() @@ -369,7 +495,12 @@ def test_ip_address_create_fixed_address_reserved_no_dns_name( ) def test_ip_address_create_fixed_address_mac(self, mock_tag_involved_objects, mock_validate_dns_name): """Validate Fixed Address type MAC_ADDRESS is created.""" - nb_ipaddress_atrs = {"dns_name": "server1.local.test.net", "mac_address": "52:1f:83:d4:9a:2e"} + nb_ipaddress_atrs = { + "fixed_address_name": "FixedAddresReserved", + "fixed_address_comment": "Fixed Address Reservation", + "has_fixed_address": True, + "mac_address": "52:1f:83:d4:9a:2e", + } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) self.nb_adapter.load() @@ -389,7 +520,8 @@ def test_ip_address_create_fixed_address_mac(self, mock_tag_involved_objects, mo infoblox_adapter.conn.create_fixed_address.assert_called_once() infoblox_adapter.conn.create_fixed_address.assert_called_with( ip_address="10.0.0.1", - name="server1.local.test.net", + name="FixedAddresReserved", + comment="Fixed Address Reservation", mac_address="52:1f:83:d4:9a:2e", match_client="MAC_ADDRESS", network_view="default", @@ -409,9 +541,12 @@ def test_ip_address_create_fixed_address_mac(self, mock_tag_involved_objects, mo "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_create_fixed_address_mac_no_dns_name(self, mock_tag_involved_objects, mock_validate_dns_name): - """Validate Fixed Address type MAC_ADDRESS is created with description used for name.""" - nb_ipaddress_atrs = {"dns_name": "", "description": "server1", "mac_address": "52:1f:83:d4:9a:2e"} + def test_ip_address_create_fixed_address_mac_no_name(self, mock_tag_involved_objects, mock_validate_dns_name): + """Validate Fixed Address type MAC is created with empty name.""" + nb_ipaddress_atrs = { + "has_fixed_address": True, + "mac_address": 
"52:1f:83:d4:9a:2e", + } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) self.nb_adapter.load() @@ -431,7 +566,8 @@ def test_ip_address_create_fixed_address_mac_no_dns_name(self, mock_tag_involved infoblox_adapter.conn.create_fixed_address.assert_called_once() infoblox_adapter.conn.create_fixed_address.assert_called_with( ip_address="10.0.0.1", - name="server1", + name="", + comment="", mac_address="52:1f:83:d4:9a:2e", match_client="MAC_ADDRESS", network_view="default", @@ -455,7 +591,12 @@ def test_ip_address_create_fixed_address_reserved_with_host_record( self, mock_tag_involved_objects, mock_validate_dns_name ): """Validate Fixed Address type RESERVED is created with DNS Host record.""" - nb_ipaddress_atrs = {"dns_name": "server1.local.test.net"} + nb_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "fixed_address_name": "FixedAddresReserved", + "fixed_address_comment": "Fixed Address Reservation", + "has_fixed_address": True, + } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) self.nb_adapter.load() @@ -474,13 +615,17 @@ def test_ip_address_create_fixed_address_reserved_with_host_record( self.nb_adapter.sync_to(infoblox_adapter) infoblox_adapter.conn.create_fixed_address.assert_called_once() infoblox_adapter.conn.create_fixed_address.assert_called_with( - ip_address="10.0.0.1", name="server1.local.test.net", match_client="RESERVED", network_view="default" + ip_address="10.0.0.1", + name="FixedAddresReserved", + comment="Fixed Address Reservation", + match_client="RESERVED", + network_view="default", ) infoblox_adapter.conn.create_a_record.assert_not_called() infoblox_adapter.conn.create_ptr_record.assert_not_called() infoblox_adapter.conn.create_host_record.assert_called_once() infoblox_adapter.conn.create_host_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" ) mock_tag_involved_objects.assert_called_once() mock_validate_dns_name.assert_called_once() @@ -501,7 +646,12 @@ def test_ip_address_create_fixed_address_reserved_with_a_record( self, mock_tag_involved_objects, mock_validate_dns_name ): """Validate Fixed Address type RESERVED is created with DNS A record.""" - nb_ipaddress_atrs = {"dns_name": "server1.local.test.net"} + nb_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "fixed_address_name": "FixedAddresReserved", + "fixed_address_comment": "Fixed Address Reservation", + "has_fixed_address": True, + } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) self.nb_adapter.load() @@ -520,13 +670,17 @@ def test_ip_address_create_fixed_address_reserved_with_a_record( self.nb_adapter.sync_to(infoblox_adapter) infoblox_adapter.conn.create_fixed_address.assert_called_once() infoblox_adapter.conn.create_fixed_address.assert_called_with( - ip_address="10.0.0.1", name="server1.local.test.net", match_client="RESERVED", network_view="default" + ip_address="10.0.0.1", + name="FixedAddresReserved", + comment="Fixed Address Reservation", + match_client="RESERVED", + network_view="default", ) infoblox_adapter.conn.create_host_record.assert_not_called() infoblox_adapter.conn.create_ptr_record.assert_not_called() infoblox_adapter.conn.create_a_record.assert_called_once() 
infoblox_adapter.conn.create_a_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" ) mock_tag_involved_objects.assert_called_once() mock_validate_dns_name.assert_called_once() @@ -547,7 +701,12 @@ def test_ip_address_create_fixed_address_reserved_with_a_and_ptr_record( self, mock_tag_involved_objects, mock_validate_dns_name ): """Validate Fixed Address type RESERVED is created with DNS A and PTR records.""" - nb_ipaddress_atrs = {"dns_name": "server1.local.test.net"} + nb_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "fixed_address_name": "FixedAddresReserved", + "fixed_address_comment": "Fixed Address Reservation", + "has_fixed_address": True, + } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) self.nb_adapter.load() @@ -566,16 +725,20 @@ def test_ip_address_create_fixed_address_reserved_with_a_and_ptr_record( self.nb_adapter.sync_to(infoblox_adapter) infoblox_adapter.conn.create_fixed_address.assert_called_once() infoblox_adapter.conn.create_fixed_address.assert_called_with( - ip_address="10.0.0.1", name="server1.local.test.net", match_client="RESERVED", network_view="default" + ip_address="10.0.0.1", + name="FixedAddresReserved", + comment="Fixed Address Reservation", + match_client="RESERVED", + network_view="default", ) infoblox_adapter.conn.create_host_record.assert_not_called() infoblox_adapter.conn.create_ptr_record.assert_called_once() infoblox_adapter.conn.create_ptr_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" ) infoblox_adapter.conn.create_a_record.assert_called_once() infoblox_adapter.conn.create_a_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" ) mock_tag_involved_objects.assert_called_once() mock_validate_dns_name.assert_called_once() @@ -596,7 +759,13 @@ def test_ip_address_create_fixed_address_mac_with_host_record( self, mock_tag_involved_objects, mock_validate_dns_name ): """Validate Fixed Address type MAC_ADDRESS is created with DNS Host record.""" - nb_ipaddress_atrs = {"dns_name": "server1.local.test.net", "mac_address": "52:1f:83:d4:9a:2e"} + nb_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "fixed_address_name": "FixedAddresReserved", + "fixed_address_comment": "Fixed Address Reservation", + "has_fixed_address": True, + "mac_address": "52:1f:83:d4:9a:2e", + } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) self.nb_adapter.load() @@ -616,7 +785,8 @@ def test_ip_address_create_fixed_address_mac_with_host_record( infoblox_adapter.conn.create_fixed_address.assert_called_once() infoblox_adapter.conn.create_fixed_address.assert_called_with( ip_address="10.0.0.1", - name="server1.local.test.net", + name="FixedAddresReserved", + comment="Fixed Address Reservation", mac_address="52:1f:83:d4:9a:2e", match_client="MAC_ADDRESS", network_view="default", @@ -625,7 +795,7 @@ def test_ip_address_create_fixed_address_mac_with_host_record( infoblox_adapter.conn.create_ptr_record.assert_not_called() 
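+ # Host record creation should now pass the IP description through as the Infoblox comment.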
infoblox_adapter.conn.create_host_record.assert_called_once() infoblox_adapter.conn.create_host_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" ) mock_tag_involved_objects.assert_called_once() mock_validate_dns_name.assert_called_once() @@ -644,7 +814,13 @@ def test_ip_address_create_fixed_address_mac_with_host_record( ) def test_ip_address_create_fixed_address_mac_with_a_record(self, mock_tag_involved_objects, mock_validate_dns_name): """Validate Fixed Address type MAC_ADDRESS is created with DNS A record.""" - nb_ipaddress_atrs = {"dns_name": "server1.local.test.net", "mac_address": "52:1f:83:d4:9a:2e"} + nb_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "fixed_address_name": "FixedAddresReserved", + "fixed_address_comment": "Fixed Address Reservation", + "has_fixed_address": True, + "mac_address": "52:1f:83:d4:9a:2e", + } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) self.nb_adapter.load() @@ -664,7 +840,8 @@ def test_ip_address_create_fixed_address_mac_with_a_record(self, mock_tag_involv infoblox_adapter.conn.create_fixed_address.assert_called_once() infoblox_adapter.conn.create_fixed_address.assert_called_with( ip_address="10.0.0.1", - name="server1.local.test.net", + name="FixedAddresReserved", + comment="Fixed Address Reservation", mac_address="52:1f:83:d4:9a:2e", match_client="MAC_ADDRESS", network_view="default", @@ -673,7 +850,7 @@ def test_ip_address_create_fixed_address_mac_with_a_record(self, mock_tag_involv infoblox_adapter.conn.create_ptr_record.assert_not_called() infoblox_adapter.conn.create_a_record.assert_called_once() infoblox_adapter.conn.create_a_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" ) mock_tag_involved_objects.assert_called_once() mock_validate_dns_name.assert_called_once() @@ -694,7 +871,13 @@ def test_ip_address_create_fixed_address_mac_with_a_and_ptr_record( self, mock_tag_involved_objects, mock_validate_dns_name ): """Validate Fixed Address type MAC_ADDRESS is created with DNS A and PTR records.""" - nb_ipaddress_atrs = {"dns_name": "server1.local.test.net", "mac_address": "52:1f:83:d4:9a:2e"} + nb_ipaddress_atrs = { + "dns_name": "server1.local.test.net", + "fixed_address_name": "FixedAddresReserved", + "fixed_address_comment": "Fixed Address Reservation", + "has_fixed_address": True, + "mac_address": "52:1f:83:d4:9a:2e", + } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) self.nb_adapter.load() @@ -714,7 +897,8 @@ def test_ip_address_create_fixed_address_mac_with_a_and_ptr_record( infoblox_adapter.conn.create_fixed_address.assert_called_once() infoblox_adapter.conn.create_fixed_address.assert_called_with( ip_address="10.0.0.1", - name="server1.local.test.net", + name="FixedAddresReserved", + comment="Fixed Address Reservation", mac_address="52:1f:83:d4:9a:2e", match_client="MAC_ADDRESS", network_view="default", @@ -722,11 +906,11 @@ def test_ip_address_create_fixed_address_mac_with_a_and_ptr_record( infoblox_adapter.conn.create_host_record.assert_not_called() infoblox_adapter.conn.create_ptr_record.assert_called_once() 
infoblox_adapter.conn.create_ptr_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" ) infoblox_adapter.conn.create_a_record.assert_called_once() infoblox_adapter.conn.create_a_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" ) mock_tag_involved_objects.assert_called_once() mock_validate_dns_name.assert_called_once() @@ -760,11 +944,11 @@ def setUp(self): def test_ip_address_update_fixed_address_type_reserved_name_and_comment( self, mock_tag_involved_objects, mock_validate_dns_name ): - """Ensure Fixed Address type RESERVED has DNS name and comment updated.""" + """Ensure Fixed Address type RESERVED has name and comment updated.""" nb_ipaddress_atrs = { - "dns_name": "server2.local.test.net", "has_fixed_address": True, - "description": "new description", + "fixed_address_name": "server2.local.test.net", + "fixed_address_comment": "new description", } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) @@ -783,11 +967,11 @@ def test_ip_address_update_fixed_address_type_reserved_name_and_comment( ) infoblox_adapter.add(inf_ds_namespace) inf_ipaddress_atrs = { - "dns_name": "server1.local.test.net", "has_fixed_address": True, "fixed_address_ref": "fixedaddress/xyz", "fixed_address_type": "RESERVED", - "description": "old description", + "fixed_address_name": "server1.local.test.net", + "fixed_address_comment": "description", } inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) infoblox_adapter.add(inf_ds_ipaddress) @@ -814,64 +998,15 @@ def test_ip_address_update_fixed_address_type_reserved_name_and_comment( "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_fixed_address_type_reserved_description_used_for_name( + def test_ip_address_update_fixed_address_type_reserved_name_and_comment_empty( self, mock_tag_involved_objects, mock_validate_dns_name ): - """Ensure Fixed Address type RESERVED is updated. 
With no DNS name description is used for name and comment.""" - nb_ipaddress_atrs = {"dns_name": "", "has_fixed_address": True, "description": "new description"} - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) - self.nb_adapter.load() - - with unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True - ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED - self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD - infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) - infoblox_adapter.job = Mock() - inf_ds_namespace = infoblox_adapter.namespace( - name="Global", - ext_attrs={}, - ) - infoblox_adapter.add(inf_ds_namespace) - inf_ipaddress_atrs = { - "dns_name": "server1.local.test.net", - "has_fixed_address": True, - "fixed_address_ref": "fixedaddress/xyz", - "fixed_address_type": "RESERVED", - "description": "old description", - } - inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) - infoblox_adapter.add(inf_ds_ipaddress) - self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.update_fixed_address.assert_called_once() - infoblox_adapter.conn.update_fixed_address.assert_called_with( - ref="fixedaddress/xyz", data={"name": "new description", "comment": "new description"} - ) - infoblox_adapter.conn.create_host_record.assert_not_called() - infoblox_adapter.conn.update_host_record.assert_not_called() - infoblox_adapter.conn.create_a_record.assert_not_called() - infoblox_adapter.conn.update_a_record.assert_not_called() - infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.update_ptr_record.assert_not_called() - mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_not_called() - - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", - autospec=True, - ) - def test_ip_address_update_fixed_address_type_reserved_old_description_used_for_name( - self, mock_tag_involved_objects, mock_validate_dns_name - ): - """Ensure Fixed Address type RESERVED is updated. 
With no DNS name and no changes to description, old description is used for name and comment.""" - nb_ipaddress_atrs = {"dns_name": "", "has_fixed_address": True, "description": "old description"} + """Ensure Fixed Address type RESERVED has name and comment set to empty string.""" + nb_ipaddress_atrs = { + "has_fixed_address": True, + "fixed_address_name": "", + "fixed_address_comment": "", + } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) self.nb_adapter.load() @@ -889,18 +1024,18 @@ def test_ip_address_update_fixed_address_type_reserved_old_description_used_for_ ) infoblox_adapter.add(inf_ds_namespace) inf_ipaddress_atrs = { - "dns_name": "server1.local.test.net", "has_fixed_address": True, "fixed_address_ref": "fixedaddress/xyz", "fixed_address_type": "RESERVED", - "description": "old description", + "fixed_address_name": "server1.local.test.net", + "fixed_address_comment": "description", } inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) infoblox_adapter.add(inf_ds_ipaddress) self.nb_adapter.sync_to(infoblox_adapter) infoblox_adapter.conn.update_fixed_address.assert_called_once() infoblox_adapter.conn.update_fixed_address.assert_called_with( - ref="fixedaddress/xyz", data={"name": "old description"} + ref="fixedaddress/xyz", data={"name": "", "comment": ""} ) infoblox_adapter.conn.create_host_record.assert_not_called() infoblox_adapter.conn.update_host_record.assert_not_called() @@ -980,11 +1115,11 @@ def test_ip_address_update_fixed_address_type_mac_update_mac( def test_ip_address_update_fixed_address_type_mac_name_and_comment( self, mock_tag_involved_objects, mock_validate_dns_name ): - """Ensure Fixed Address type MAC has DNS name and comment updated.""" + """Ensure Fixed Address type MAC has name and comment updated.""" nb_ipaddress_atrs = { - "dns_name": "server2.local.test.net", + "fixed_address_name": "server2.local.test.net", "has_fixed_address": True, - "description": "new description", + "fixed_address_comment": "new description", } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) @@ -1003,11 +1138,11 @@ def test_ip_address_update_fixed_address_type_mac_name_and_comment( ) infoblox_adapter.add(inf_ds_namespace) inf_ipaddress_atrs = { - "dns_name": "server1.local.test.net", + "fixed_address_name": "server1.local.test.net", "has_fixed_address": True, "fixed_address_ref": "fixedaddress/xyz", "fixed_address_type": "MAC_ADDRESS", - "description": "old description", + "fixed_address_comment": "old description", } inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) infoblox_adapter.add(inf_ds_ipaddress) @@ -1034,64 +1169,15 @@ def test_ip_address_update_fixed_address_type_mac_name_and_comment( "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_fixed_address_type_mac_description_used_for_name( - self, mock_tag_involved_objects, mock_validate_dns_name - ): - """Ensure Fixed Address type MAC_ADDRESS is updated. 
With no DNS name description is used for name and comment.""" - nb_ipaddress_atrs = {"dns_name": "", "has_fixed_address": True, "description": "new description"} - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) - self.nb_adapter.load() - - with unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True - ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS - self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD - infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) - infoblox_adapter.job = Mock() - inf_ds_namespace = infoblox_adapter.namespace( - name="Global", - ext_attrs={}, - ) - infoblox_adapter.add(inf_ds_namespace) - inf_ipaddress_atrs = { - "dns_name": "server1.local.test.net", - "has_fixed_address": True, - "fixed_address_ref": "fixedaddress/xyz", - "fixed_address_type": "MAC_ADDRESS", - "description": "old description", - } - inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) - infoblox_adapter.add(inf_ds_ipaddress) - self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.update_fixed_address.assert_called_once() - infoblox_adapter.conn.update_fixed_address.assert_called_with( - ref="fixedaddress/xyz", data={"name": "new description", "comment": "new description"} - ) - infoblox_adapter.conn.create_host_record.assert_not_called() - infoblox_adapter.conn.update_host_record.assert_not_called() - infoblox_adapter.conn.create_a_record.assert_not_called() - infoblox_adapter.conn.update_a_record.assert_not_called() - infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.update_ptr_record.assert_not_called() - mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_not_called() - - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", - autospec=True, - ) - def test_ip_address_update_fixed_address_type_mac_old_description_used_for_name( + def test_ip_address_update_fixed_address_type_mac_name_and_comment_empty( self, mock_tag_involved_objects, mock_validate_dns_name ): - """Ensure Fixed Address type MAC_ADDRESS is updated. 
With no DNS name and no changes to description, old description is used for name and comment.""" - nb_ipaddress_atrs = {"dns_name": "", "has_fixed_address": True, "description": "old description"} + """Ensure Fixed Address type MAC has name and comment set to empty string.""" + nb_ipaddress_atrs = { + "has_fixed_address": True, + "fixed_address_name": "", + "fixed_address_comment": "", + } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) self.nb_adapter.load() @@ -1109,18 +1195,18 @@ def test_ip_address_update_fixed_address_type_mac_old_description_used_for_name( ) infoblox_adapter.add(inf_ds_namespace) inf_ipaddress_atrs = { - "dns_name": "server1.local.test.net", "has_fixed_address": True, "fixed_address_ref": "fixedaddress/xyz", "fixed_address_type": "MAC_ADDRESS", - "description": "old description", + "fixed_address_name": "server1.local.test.net", + "fixed_address_comment": "description", } inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) infoblox_adapter.add(inf_ds_ipaddress) self.nb_adapter.sync_to(infoblox_adapter) infoblox_adapter.conn.update_fixed_address.assert_called_once() infoblox_adapter.conn.update_fixed_address.assert_called_with( - ref="fixedaddress/xyz", data={"name": "old description"} + ref="fixedaddress/xyz", data={"name": "", "comment": ""} ) infoblox_adapter.conn.create_host_record.assert_not_called() infoblox_adapter.conn.update_host_record.assert_not_called() @@ -1228,7 +1314,7 @@ def test_ip_address_update_create_host_record(self, mock_tag_involved_objects, m self.nb_adapter.sync_to(infoblox_adapter) infoblox_adapter.conn.create_host_record.assert_called_once() infoblox_adapter.conn.create_host_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" ) infoblox_adapter.conn.update_host_record.assert_not_called() infoblox_adapter.conn.create_a_record.assert_not_called() @@ -1335,7 +1421,7 @@ def test_ip_address_update_create_a_record(self, mock_tag_involved_objects, mock self.nb_adapter.sync_to(infoblox_adapter) infoblox_adapter.conn.create_a_record.assert_called_once() infoblox_adapter.conn.create_a_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", network_view="default" + fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" ) infoblox_adapter.conn.create_host_record.assert_not_called() infoblox_adapter.conn.update_host_record.assert_not_called() @@ -1396,7 +1482,7 @@ def test_ip_address_update_create_ptr_record(self, mock_tag_involved_objects, mo infoblox_adapter.conn.create_ptr_record.assert_called_once() infoblox_adapter.conn.create_ptr_record.assert_called_with( - fqdn="server2.local.test.net", ip_address="10.0.0.1", network_view="default" + fqdn="server2.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" ) infoblox_adapter.conn.create_host_record.assert_not_called() infoblox_adapter.conn.update_host_record.assert_not_called() @@ -1747,6 +1833,126 @@ def test_ip_address_update_no_dns_updates(self, mock_tag_involved_objects, mock_ mock_tag_involved_objects.assert_called_once() mock_validate_dns_name.assert_not_called() + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + 
@unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_update_create_fixed_address_reserved(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure Fixed Address Reserved is created with DNS record in place, no FA in Infoblox, and config asking for Reserved IP creation.""" + nb_ipaddress_atrs = { + "has_a_record": True, + "has_fixed_address": True, + "fixed_address_name": "FixedAddresReserved", + "fixed_address_comment": "Fixed Address Reservation", + } + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "has_a_record": True, + "has_fixed_address": False, + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + + infoblox_adapter.conn.create_fixed_address.assert_called_once() + infoblox_adapter.conn.create_fixed_address.assert_called_with( + ip_address="10.0.0.1", + name="FixedAddresReserved", + comment="Fixed Address Reservation", + match_client="RESERVED", + network_view="default", + ) + infoblox_adapter.conn.update_fixed_address.assert_not_called() + infoblox_adapter.conn.create_host_record.assert_not_called() + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_not_called() + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=True, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ip_address_update_create_fixed_address_mac(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure Fixed Address MAC is created with DNS record in place, no FA in Infoblox, and config asking for MAC IP creation.""" + nb_ipaddress_atrs = { + "has_a_record": True, + "mac_address": "52:1f:83:d4:9a:2e", + "has_fixed_address": True, + "fixed_address_name": "FixedAddresReserved", + "fixed_address_comment": "Fixed Address Reservation", + } + nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) + self.nb_adapter.add(nb_ds_ipaddress) + self.nb_adapter.load() + + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS + self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, 
config=self.config) + infoblox_adapter.job = Mock() + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + inf_ipaddress_atrs = { + "has_a_record": True, + "has_fixed_address": False, + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) + self.nb_adapter.sync_to(infoblox_adapter) + + infoblox_adapter.conn.create_fixed_address.assert_called_once() + infoblox_adapter.conn.create_fixed_address.assert_called_with( + ip_address="10.0.0.1", + name="FixedAddresReserved", + mac_address="52:1f:83:d4:9a:2e", + comment="Fixed Address Reservation", + match_client="MAC_ADDRESS", + network_view="default", + ) + infoblox_adapter.conn.update_fixed_address.assert_not_called() + infoblox_adapter.conn.create_host_record.assert_not_called() + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_not_called() + ############## # Update Fixed Address and Update/Create DNS Record ############## @@ -1769,6 +1975,8 @@ def test_ip_address_update_fixed_address_reservation_and_host_record( "description": "new description", "has_fixed_address": True, "has_host_record": True, + "fixed_address_name": "new fa name", + "fixed_address_comment": "new fa comment", } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) @@ -1794,6 +2002,8 @@ def test_ip_address_update_fixed_address_reservation_and_host_record( "fixed_address_ref": "fixedaddress/xyz", "fixed_address_type": "RESERVED", "description": "old description", + "fixed_address_name": "old fa name", + "fixed_address_comment": "old fa comment", } inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) infoblox_adapter.add(inf_ds_ipaddress) @@ -1805,7 +2015,7 @@ def test_ip_address_update_fixed_address_reservation_and_host_record( ) infoblox_adapter.conn.update_fixed_address.assert_called_once() infoblox_adapter.conn.update_fixed_address.assert_called_with( - ref="fixedaddress/xyz", data={"comment": "new description", "name": "server2.local.test.net"} + ref="fixedaddress/xyz", data={"comment": "new fa comment", "name": "new fa name"} ) infoblox_adapter.conn.create_host_record.assert_not_called() infoblox_adapter.conn.create_a_record.assert_not_called() @@ -1837,6 +2047,8 @@ def test_ip_address_update_fixed_address_reservation_and_a_and_ptr_records( "has_fixed_address": True, "has_a_record": True, "has_ptr_record": True, + "fixed_address_name": "new fa name", + "fixed_address_comment": "new fa comment", } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) @@ -1864,6 +2076,8 @@ def test_ip_address_update_fixed_address_reservation_and_a_and_ptr_records( "fixed_address_ref": "fixedaddress/xyz", "fixed_address_type": "RESERVED", "description": "old description", + "fixed_address_name": "old fa name", + "fixed_address_comment": "old fa comment", } inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) infoblox_adapter.add(inf_ds_ipaddress) @@ -1879,7 +2093,7 @@ def 
test_ip_address_update_fixed_address_reservation_and_a_and_ptr_records( ) infoblox_adapter.conn.update_fixed_address.assert_called_once() infoblox_adapter.conn.update_fixed_address.assert_called_with( - ref="fixedaddress/xyz", data={"comment": "new description", "name": "server2.local.test.net"} + ref="fixedaddress/xyz", data={"comment": "new fa comment", "name": "new fa name"} ) infoblox_adapter.conn.create_host_record.assert_not_called() infoblox_adapter.conn.update_host_record.assert_not_called() @@ -1909,6 +2123,8 @@ def test_ip_address_update_fixed_address_mac_and_host_record( "description": "new description", "has_fixed_address": True, "has_host_record": True, + "fixed_address_name": "new fa name", + "fixed_address_comment": "new fa comment", } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) @@ -1934,6 +2150,8 @@ def test_ip_address_update_fixed_address_mac_and_host_record( "fixed_address_ref": "fixedaddress/xyz", "fixed_address_type": "MAC_ADDRESS", "description": "old description", + "fixed_address_name": "old fa name", + "fixed_address_comment": "old fa comment", } inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) infoblox_adapter.add(inf_ds_ipaddress) @@ -1945,7 +2163,7 @@ def test_ip_address_update_fixed_address_mac_and_host_record( ) infoblox_adapter.conn.update_fixed_address.assert_called_once() infoblox_adapter.conn.update_fixed_address.assert_called_with( - ref="fixedaddress/xyz", data={"comment": "new description", "name": "server2.local.test.net"} + ref="fixedaddress/xyz", data={"comment": "new fa comment", "name": "new fa name"} ) infoblox_adapter.conn.create_host_record.assert_not_called() infoblox_adapter.conn.create_a_record.assert_not_called() @@ -1977,6 +2195,8 @@ def test_ip_address_update_fixed_address_mac_and_a_and_ptr_records( "has_fixed_address": True, "has_a_record": True, "has_ptr_record": True, + "fixed_address_name": "new fa name", + "fixed_address_comment": "new fa comment", } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) @@ -2004,6 +2224,8 @@ def test_ip_address_update_fixed_address_mac_and_a_and_ptr_records( "fixed_address_ref": "fixedaddress/xyz", "fixed_address_type": "MAC_ADDRESS", "description": "old description", + "fixed_address_name": "old fa name", + "fixed_address_comment": "old fa comment", } inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) infoblox_adapter.add(inf_ds_ipaddress) @@ -2019,7 +2241,7 @@ def test_ip_address_update_fixed_address_mac_and_a_and_ptr_records( ) infoblox_adapter.conn.update_fixed_address.assert_called_once() infoblox_adapter.conn.update_fixed_address.assert_called_with( - ref="fixedaddress/xyz", data={"comment": "new description", "name": "server2.local.test.net"} + ref="fixedaddress/xyz", data={"comment": "new fa comment", "name": "new fa name"} ) infoblox_adapter.conn.create_host_record.assert_not_called() infoblox_adapter.conn.update_host_record.assert_not_called() @@ -2049,6 +2271,8 @@ def test_ip_address_update_fixed_address_reservation_and_create_host_record( "description": "new description", "has_fixed_address": True, "has_host_record": True, + "fixed_address_name": "new fa name", + "fixed_address_comment": "new fa comment", } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) @@ -2073,6 +2297,8 @@ def 
test_ip_address_update_fixed_address_reservation_and_create_host_record( "fixed_address_ref": "fixedaddress/xyz", "fixed_address_type": "RESERVED", "description": "old description", + "fixed_address_name": "old fa name", + "fixed_address_comment": "old fa comment", } inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) infoblox_adapter.add(inf_ds_ipaddress) @@ -2080,11 +2306,11 @@ def test_ip_address_update_fixed_address_reservation_and_create_host_record( infoblox_adapter.conn.create_host_record.assert_called_once() infoblox_adapter.conn.create_host_record.assert_called_with( - fqdn="server2.local.test.net", ip_address="10.0.0.1", network_view="default" + fqdn="server2.local.test.net", ip_address="10.0.0.1", comment="new description", network_view="default" ) infoblox_adapter.conn.update_fixed_address.assert_called_once() infoblox_adapter.conn.update_fixed_address.assert_called_with( - ref="fixedaddress/xyz", data={"comment": "new description", "name": "server2.local.test.net"} + ref="fixedaddress/xyz", data={"comment": "new fa comment", "name": "new fa name"} ) infoblox_adapter.conn.update_host_record.assert_not_called() infoblox_adapter.conn.create_a_record.assert_not_called() @@ -2116,6 +2342,8 @@ def test_ip_address_update_fixed_address_reservation_and_create_a_and_ptr_record "has_fixed_address": True, "has_a_record": True, "has_ptr_record": True, + "fixed_address_name": "new fa name", + "fixed_address_comment": "new fa comment", } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) @@ -2141,6 +2369,8 @@ def test_ip_address_update_fixed_address_reservation_and_create_a_and_ptr_record "fixed_address_ref": "fixedaddress/xyz", "fixed_address_type": "RESERVED", "description": "old description", + "fixed_address_name": "old fa name", + "fixed_address_comment": "old fa comment", } inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) infoblox_adapter.add(inf_ds_ipaddress) @@ -2148,15 +2378,15 @@ def test_ip_address_update_fixed_address_reservation_and_create_a_and_ptr_record infoblox_adapter.conn.create_a_record.assert_called_once() infoblox_adapter.conn.create_a_record.assert_called_with( - fqdn="server2.local.test.net", ip_address="10.0.0.1", network_view="default" + fqdn="server2.local.test.net", ip_address="10.0.0.1", comment="new description", network_view="default" ) infoblox_adapter.conn.create_ptr_record.assert_called_once() infoblox_adapter.conn.create_ptr_record.assert_called_with( - fqdn="server2.local.test.net", ip_address="10.0.0.1", network_view="default" + fqdn="server2.local.test.net", ip_address="10.0.0.1", comment="new description", network_view="default" ) infoblox_adapter.conn.update_fixed_address.assert_called_once() infoblox_adapter.conn.update_fixed_address.assert_called_with( - ref="fixedaddress/xyz", data={"comment": "new description", "name": "server2.local.test.net"} + ref="fixedaddress/xyz", data={"comment": "new fa comment", "name": "new fa name"} ) infoblox_adapter.conn.create_host_record.assert_not_called() infoblox_adapter.conn.update_host_record.assert_not_called() @@ -2186,6 +2416,8 @@ def test_ip_address_update_mac_address_reservation_and_create_host_record( "description": "new description", "has_fixed_address": True, "has_host_record": True, + "fixed_address_name": "ReservedIP2", + "fixed_address_comment": "New Comment", } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) 
self.nb_adapter.add(nb_ds_ipaddress) @@ -2210,6 +2442,8 @@ def test_ip_address_update_mac_address_reservation_and_create_host_record( "fixed_address_ref": "fixedaddress/xyz", "fixed_address_type": "MAC_ADDRESS", "description": "old description", + "fixed_address_name": "ReservedIP1", + "fixed_address_comment": "Old Comment", } inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) infoblox_adapter.add(inf_ds_ipaddress) @@ -2217,11 +2451,11 @@ def test_ip_address_update_mac_address_reservation_and_create_host_record( infoblox_adapter.conn.create_host_record.assert_called_once() infoblox_adapter.conn.create_host_record.assert_called_with( - fqdn="server2.local.test.net", ip_address="10.0.0.1", network_view="default" + fqdn="server2.local.test.net", ip_address="10.0.0.1", comment="new description", network_view="default" ) infoblox_adapter.conn.update_fixed_address.assert_called_once() infoblox_adapter.conn.update_fixed_address.assert_called_with( - ref="fixedaddress/xyz", data={"comment": "new description", "name": "server2.local.test.net"} + ref="fixedaddress/xyz", data={"comment": "New Comment", "name": "ReservedIP2"} ) infoblox_adapter.conn.update_host_record.assert_not_called() infoblox_adapter.conn.create_a_record.assert_not_called() @@ -2253,6 +2487,8 @@ def test_ip_address_update_fixed_address_mac_and_create_a_and_ptr_records( "has_fixed_address": True, "has_a_record": True, "has_ptr_record": True, + "fixed_address_name": "new fa name", + "fixed_address_comment": "new fa comment", } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) @@ -2278,6 +2514,8 @@ def test_ip_address_update_fixed_address_mac_and_create_a_and_ptr_records( "fixed_address_ref": "fixedaddress/xyz", "fixed_address_type": "MAC_ADDRESS", "description": "old description", + "fixed_address_name": "old fa name", + "fixed_address_comment": "old fa comment", } inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) infoblox_adapter.add(inf_ds_ipaddress) @@ -2285,15 +2523,15 @@ def test_ip_address_update_fixed_address_mac_and_create_a_and_ptr_records( infoblox_adapter.conn.create_a_record.assert_called_once() infoblox_adapter.conn.create_a_record.assert_called_with( - fqdn="server2.local.test.net", ip_address="10.0.0.1", network_view="default" + fqdn="server2.local.test.net", ip_address="10.0.0.1", comment="new description", network_view="default" ) infoblox_adapter.conn.create_ptr_record.assert_called_once() infoblox_adapter.conn.create_ptr_record.assert_called_with( - fqdn="server2.local.test.net", ip_address="10.0.0.1", network_view="default" + fqdn="server2.local.test.net", ip_address="10.0.0.1", comment="new description", network_view="default" ) infoblox_adapter.conn.update_fixed_address.assert_called_once() infoblox_adapter.conn.update_fixed_address.assert_called_with( - ref="fixedaddress/xyz", data={"comment": "new description", "name": "server2.local.test.net"} + ref="fixedaddress/xyz", data={"comment": "new fa comment", "name": "new fa name"} ) infoblox_adapter.conn.create_host_record.assert_not_called() infoblox_adapter.conn.update_host_record.assert_not_called() From ad0d0ddf357137ac8efeaee16be0e2f87ba915b2 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Fri, 7 Jun 2024 20:45:58 +0100 Subject: [PATCH 101/229] Linting. 
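
Import ordering in the test modules now follows the grouping the
project's linters expect, and ref-based fixture helpers are added for
the new fixed address and host record lookups. A sketch of the import
convention applied below (assuming isort-style stdlib / third-party /
first-party blocks):

    import os

    from django.test import TestCase
    from nautobot.ipam.models import VLAN, IPAddress, Prefix, VLANGroup

    from nautobot_ssot.integrations.infoblox.models import SSOTInfobloxConfig
    from nautobot_ssot.integrations.infoblox.utils import client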
--- nautobot_ssot/tests/infoblox/fixtures_infoblox.py | 15 ++++++++++++--- nautobot_ssot/tests/infoblox/test_models.py | 7 ++----- .../tests/infoblox/test_nautobot_adapter.py | 3 +-- 3 files changed, 15 insertions(+), 10 deletions(-) diff --git a/nautobot_ssot/tests/infoblox/fixtures_infoblox.py b/nautobot_ssot/tests/infoblox/fixtures_infoblox.py index d83bddcd6..f6594afa3 100644 --- a/nautobot_ssot/tests/infoblox/fixtures_infoblox.py +++ b/nautobot_ssot/tests/infoblox/fixtures_infoblox.py @@ -19,11 +19,10 @@ SecretsGroupAssociation, Status, ) -from nautobot.ipam.models import IPAddress, Prefix, VLAN, VLANGroup +from nautobot.ipam.models import VLAN, IPAddress, Prefix, VLANGroup - -from nautobot_ssot.integrations.infoblox.utils import client from nautobot_ssot.integrations.infoblox.models import SSOTInfobloxConfig +from nautobot_ssot.integrations.infoblox.utils import client FIXTURES = os.environ.get("FIXTURE_DIR", "nautobot_ssot/tests/infoblox/fixtures") @@ -164,11 +163,21 @@ def create_host_record(): return _json_read_fixture("create_host_record.json") +def get_fixed_address_by_ref(): + """Return a get Fixed Address by ref response.""" + return _json_read_fixture("get_fixed_address_by_ref.json") + + def get_host_by_ip(): """Return a get Host by IP response.""" return _json_read_fixture("get_host_by_ip.json") +def get_host_by_ref(): + """Return a get Host by ref response.""" + return _json_read_fixture("get_host_by_ref.json") + + def get_a_record_by_ip(): """Return a get A record by IP response.""" return _json_read_fixture("get_a_record_by_ip.json") diff --git a/nautobot_ssot/tests/infoblox/test_models.py b/nautobot_ssot/tests/infoblox/test_models.py index 4745004ad..2303b7da3 100644 --- a/nautobot_ssot/tests/infoblox/test_models.py +++ b/nautobot_ssot/tests/infoblox/test_models.py @@ -1,15 +1,12 @@ # pylint: disable=R0801 """Infoblox Integration model tests.""" import os -from unittest import mock from copy import deepcopy +from unittest import mock from django.core.exceptions import ValidationError from django.test import TestCase -from nautobot.extras.choices import ( - SecretsGroupAccessTypeChoices, - SecretsGroupSecretTypeChoices, -) +from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.models import ExternalIntegration, Secret, SecretsGroup, SecretsGroupAssociation, Status from nautobot_ssot.integrations.infoblox.choices import DNSRecordTypeChoices, FixedAddressTypeChoices diff --git a/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py b/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py index 5051c2b38..faffec45d 100644 --- a/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py +++ b/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py @@ -4,9 +4,8 @@ from django.contrib.contenttypes.models import ContentType from django.test import TestCase - from nautobot.extras.models import RelationshipAssociation, Status -from nautobot.ipam.models import IPAddress, Namespace, Prefix, VLAN, VLANGroup +from nautobot.ipam.models import VLAN, IPAddress, Namespace, Prefix, VLANGroup from nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot import NautobotAdapter from nautobot_ssot.tests.infoblox.fixtures_infoblox import create_default_infoblox_config, create_prefix_relationship From 8697679acb5c6bc37a79714f2c2b849914b56b1c Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Fri, 7 Jun 2024 20:46:22 +0100 Subject: [PATCH 102/229] Add nautobot diffsync model tests. 
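
The new module exercises the Nautobot-side diffsync models for prefixes
and IP addresses, covering creation and updates driven by Infoblox
fixed addresses, DNS records, and extensible attributes. Every test
follows the same round-trip shape (a simplified sketch built from the
helpers the module defines; real tests also add namespaces and a parent
prefix first):

    infoblox_adapter = InfobloxAdapter(conn=Mock(), config=config)
    infoblox_adapter.add(infoblox_adapter.ipaddress(**_get_ip_address_dict({"ip_addr_type": "dhcp"})))
    nb_adapter = NautobotAdapter(config=config)
    nb_adapter.job = Mock()
    nb_adapter.load()
    infoblox_adapter.sync_to(nb_adapter)
    # assertions then run against the Nautobot ORM:
    ipaddress = IPAddress.objects.get(address="10.0.0.1/8")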
--- .../tests/infoblox/test_nautobot_models.py | 443 ++++++++++++++++++ 1 file changed, 443 insertions(+) create mode 100644 nautobot_ssot/tests/infoblox/test_nautobot_models.py diff --git a/nautobot_ssot/tests/infoblox/test_nautobot_models.py b/nautobot_ssot/tests/infoblox/test_nautobot_models.py new file mode 100644 index 000000000..9488319ad --- /dev/null +++ b/nautobot_ssot/tests/infoblox/test_nautobot_models.py @@ -0,0 +1,443 @@ +# pylint: disable=too-many-lines,too-many-public-methods,R0801 +"""Unit tests for the Infoblox Diffsync models.""" +from unittest.mock import Mock + +from django.test import TestCase +from nautobot.extras.models import Status, Tag +from nautobot.ipam.models import IPAddress, Namespace, Prefix + +from nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox import InfobloxAdapter +from nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot import NautobotAdapter + +from .fixtures_infoblox import create_default_infoblox_config + + +def _get_ip_address_dict(attrs): + """Build dict used for creating diffsync IP address.""" + ipaddress_dict = { + "description": "Test IPAddress", + "address": "10.0.0.1", + "status": "Active", + "prefix": "10.0.0.0/8", + "prefix_length": 8, + "ip_addr_type": "host", + "namespace": "dev", + "dns_name": "", + "ext_attrs": {}, + } + ipaddress_dict.update(attrs) + + return ipaddress_dict + + +def _get_network_dict(attrs): + """Build dict used for creating diffsync network.""" + network_dict = { + "network": "10.0.0.0/8", + "description": "TestNetwork", + "namespace": "dev", + "status": "Active", + "ext_attrs": {}, + } + network_dict.update(attrs) + + return network_dict + + +class TestModelNautobotNetwork(TestCase): + """Tests correct network record is created.""" + + def setUp(self): + "Test class set up." 
+ self.config = create_default_infoblox_config()
+ self.config.infoblox_sync_filters = [{"network_view": "default"}, {"network_view": "dev"}]
+ self.namespace_dev, _ = Namespace.objects.get_or_create(name="dev")
+ self.status_active, _ = Status.objects.get_or_create(name="Active")
+ self.tag_sync_from_infoblox, _ = Tag.objects.get_or_create(name="SSoT Synced from Infoblox")
+ self.infoblox_adapter = InfobloxAdapter(conn=Mock(), config=self.config)
+ inf_ds_namespace = self.infoblox_adapter.namespace(
+ name="Global",
+ ext_attrs={},
+ )
+ self.infoblox_adapter.add(inf_ds_namespace)
+ inf_ds_namespace = self.infoblox_adapter.namespace(
+ name="dev",
+ ext_attrs={},
+ )
+ self.infoblox_adapter.add(inf_ds_namespace)
+
+ def test_network_create_network(self):
+ """Validate network gets created."""
+ inf_network_atrs = {"network_type": "network", "namespace": "dev"}
+ inf_ds_network = self.infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs))
+ self.infoblox_adapter.add(inf_ds_network)
+
+ nb_adapter = NautobotAdapter(config=self.config)
+ nb_adapter.job = Mock()
+ nb_adapter.load()
+ self.infoblox_adapter.sync_to(nb_adapter)
+
+ prefix = Prefix.objects.get(network="10.0.0.0", prefix_length="8", namespace__name="dev")
+
+ self.assertEqual("10.0.0.0/8", str(prefix.prefix))
+ self.assertEqual("dev", prefix.namespace.name)
+ self.assertEqual("Active", prefix.status.name)
+ self.assertEqual("TestNetwork", prefix.description)
+ self.assertEqual("network", prefix.type)
+ self.assertIn(self.tag_sync_from_infoblox, prefix.tags.all())
+
+ def test_network_update_network(self):
+ """Validate network gets updated."""
+ inf_network_atrs = {
+ "network_type": "network",
+ "namespace": "dev",
+ "ext_attrs": {"vlan": "10"},
+ "description": "New description",
+ }
+ inf_ds_network = self.infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs))
+ self.infoblox_adapter.add(inf_ds_network)
+
+ Prefix.objects.get_or_create(
+ prefix="10.0.0.0/8",
+ status=self.status_active,
+ type="network",
+ description="Old description",
+ namespace=self.namespace_dev,
+ )
+
+ nb_adapter = NautobotAdapter(config=self.config)
+ nb_adapter.job = Mock()
+ nb_adapter.load()
+ self.infoblox_adapter.sync_to(nb_adapter)
+
+ prefix = Prefix.objects.get(network="10.0.0.0", prefix_length="8", namespace__name="dev")
+
+ self.assertEqual("10.0.0.0/8", str(prefix.prefix))
+ self.assertEqual("dev", prefix.namespace.name)
+ self.assertEqual("Active", prefix.status.name)
+ self.assertEqual("New description", prefix.description)
+ self.assertEqual("network", prefix.type)
+ self.assertEqual({"vlan": "10"}, prefix.custom_field_data)
+ self.assertIn(self.tag_sync_from_infoblox, prefix.tags.all())
+
+
+class TestModelNautobotIPAddress(TestCase):
+ """Tests correct IP address record is created or updated."""
+
+ def setUp(self):
+ "Test class set up."
+ self.config = create_default_infoblox_config() + self.config.infoblox_sync_filters = [{"network_view": "default"}, {"network_view": "dev"}] + self.namespace_dev, _ = Namespace.objects.get_or_create(name="dev") + self.status_active, _ = Status.objects.get_or_create(name="Active") + self.tag_sync_from_infoblox, _ = Tag.objects.get_or_create(name="SSoT Synced from Infoblox") + self.infoblox_adapter = InfobloxAdapter(conn=Mock(), config=self.config) + inf_ds_namespace = self.infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + self.infoblox_adapter.add(inf_ds_namespace) + inf_ds_namespace = self.infoblox_adapter.namespace( + name="dev", + ext_attrs={}, + ) + self.infoblox_adapter.add(inf_ds_namespace) + + def test_ip_address_create_address_from_fixed_address_reserved(self): + """Validate ip address gets created from Infoblox fixed address reservation.""" + inf_network_atrs = {"network_type": "network", "namespace": "dev"} + inf_ds_network = self.infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs)) + self.infoblox_adapter.add(inf_ds_network) + inf_address_atrs = { + "ip_addr_type": "dhcp", + "has_fixed_address": True, + "fixed_address_name": "FixedAddressReserved", + "fixed_address_comment": "Created From FA Reserved", + } + inf_ds_ipaddress = self.infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_address_atrs)) + self.infoblox_adapter.add(inf_ds_ipaddress) + + Prefix.objects.get_or_create( + prefix="10.0.0.0/8", + status=self.status_active, + type="network", + description="TestNetwork", + namespace=self.namespace_dev, + ) + + nb_adapter = NautobotAdapter(config=self.config) + nb_adapter.job = Mock() + nb_adapter.load() + self.infoblox_adapter.sync_to(nb_adapter) + + ipaddress = IPAddress.objects.get(address="10.0.0.1/8", parent__namespace__name="dev") + + self.assertEqual("10.0.0.1/8", str(ipaddress.address)) + self.assertEqual("dev", ipaddress.parent.namespace.name) + self.assertEqual("Active", ipaddress.status.name) + self.assertEqual("FixedAddressReserved", ipaddress.description) + self.assertEqual("dhcp", ipaddress.type) + self.assertEqual("Created From FA Reserved", ipaddress.custom_field_data.get("fixed_address_comment")) + self.assertIn(self.tag_sync_from_infoblox, ipaddress.tags.all()) + + def test_ip_address_create_address_from_fixed_address_mac(self): + """Validate ip address gets created from Infoblox fixed address with mac address.""" + inf_network_atrs = {"network_type": "network", "namespace": "dev"} + inf_ds_network = self.infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs)) + self.infoblox_adapter.add(inf_ds_network) + inf_address_atrs = { + "ip_addr_type": "dhcp", + "has_fixed_address": True, + "mac_address": "52:1f:83:d4:9a:2e", + "fixed_address_name": "FixedAddressMAC", + "fixed_address_comment": "Created From FA MAC", + } + inf_ds_ipaddress = self.infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_address_atrs)) + self.infoblox_adapter.add(inf_ds_ipaddress) + + Prefix.objects.get_or_create( + prefix="10.0.0.0/8", + status=self.status_active, + type="network", + description="TestNetwork", + namespace=self.namespace_dev, + ) + + nb_adapter = NautobotAdapter(config=self.config) + nb_adapter.job = Mock() + nb_adapter.load() + self.infoblox_adapter.sync_to(nb_adapter) + + ipaddress = IPAddress.objects.get(address="10.0.0.1/8", parent__namespace__name="dev") + + self.assertEqual("10.0.0.1/8", str(ipaddress.address)) + self.assertEqual("dev", ipaddress.parent.namespace.name) + self.assertEqual("Active", ipaddress.status.name) + 
self.assertEqual("FixedAddressMAC", ipaddress.description) + self.assertEqual("dhcp", ipaddress.type) + self.assertEqual("52:1f:83:d4:9a:2e", ipaddress.custom_field_data.get("mac_address")) + self.assertEqual("Created From FA MAC", ipaddress.custom_field_data.get("fixed_address_comment")) + self.assertIn(self.tag_sync_from_infoblox, ipaddress.tags.all()) + + def test_ip_address_create_address_from_dns_record(self): + """Validate ip address gets created from Infoblox DNS host record. This also applies to A record.""" + inf_network_atrs = {"network_type": "network", "namespace": "dev"} + inf_ds_network = self.infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs)) + self.infoblox_adapter.add(inf_ds_network) + inf_address_atrs = { + "ip_addr_type": "host", + "has_host_record": True, + "dns_name": "server1.nautobot.local.net", + "description": "Server1", + } + inf_ds_ipaddress = self.infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_address_atrs)) + self.infoblox_adapter.add(inf_ds_ipaddress) + + Prefix.objects.get_or_create( + prefix="10.0.0.0/8", + status=self.status_active, + type="network", + description="TestNetwork", + namespace=self.namespace_dev, + ) + + nb_adapter = NautobotAdapter(config=self.config) + nb_adapter.job = Mock() + nb_adapter.load() + self.infoblox_adapter.sync_to(nb_adapter) + + ipaddress = IPAddress.objects.get(address="10.0.0.1/8", parent__namespace__name="dev") + self.assertEqual("10.0.0.1/8", str(ipaddress.address)) + self.assertEqual("dev", ipaddress.parent.namespace.name) + self.assertEqual("Active", ipaddress.status.name) + self.assertEqual("server1.nautobot.local.net", ipaddress.dns_name) + self.assertEqual("Server1", ipaddress.description) + self.assertEqual("host", ipaddress.type) + self.assertIn(self.tag_sync_from_infoblox, ipaddress.tags.all()) + + def test_ip_address_create_address_from_fixed_address_mac_and_dns_record(self): + """Validate ip address gets created from Infoblox Fixed Address MAC + A host record. + Fixed address name takes precedence and is recorded in the description field of Nautobot IP Address. 
+ """ + inf_network_atrs = {"network_type": "network", "namespace": "dev"} + inf_ds_network = self.infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs)) + self.infoblox_adapter.add(inf_ds_network) + inf_address_atrs = { + "ip_addr_type": "dhcp", + "has_a_record": True, + "dns_name": "server1.nautobot.local.net", + "description": "Server1", + "has_fixed_address": True, + "mac_address": "52:1f:83:d4:9a:2e", + "fixed_address_name": "FixedAddressMAC", + "fixed_address_comment": "Created From FA MAC", + } + inf_ds_ipaddress = self.infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_address_atrs)) + self.infoblox_adapter.add(inf_ds_ipaddress) + + Prefix.objects.get_or_create( + prefix="10.0.0.0/8", + status=self.status_active, + type="network", + description="TestNetwork", + namespace=self.namespace_dev, + ) + + nb_adapter = NautobotAdapter(config=self.config) + nb_adapter.job = Mock() + nb_adapter.load() + self.infoblox_adapter.sync_to(nb_adapter) + + ipaddress = IPAddress.objects.get(address="10.0.0.1/8", parent__namespace__name="dev") + self.assertEqual("10.0.0.1/8", str(ipaddress.address)) + self.assertEqual("dev", ipaddress.parent.namespace.name) + self.assertEqual("Active", ipaddress.status.name) + self.assertEqual("server1.nautobot.local.net", ipaddress.dns_name) + self.assertEqual("FixedAddressMAC", ipaddress.description) + self.assertEqual("dhcp", ipaddress.type) + self.assertEqual("52:1f:83:d4:9a:2e", ipaddress.custom_field_data.get("mac_address")) + self.assertEqual("Created From FA MAC", ipaddress.custom_field_data.get("fixed_address_comment")) + self.assertIn(self.tag_sync_from_infoblox, ipaddress.tags.all()) + + ############ + # IP Address updates + ########### + + def test_ip_address_update_address_from_fixed_address_reserved(self): + """Validate ip address gets updated from Infoblox fixed address reservation.""" + inf_network_atrs = {"network_type": "network", "namespace": "dev", "ext_attrs": {"vlans": {}}} + inf_ds_network = self.infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs)) + self.infoblox_adapter.add(inf_ds_network) + inf_address_atrs = { + "ip_addr_type": "dhcp", + "has_fixed_address": True, + "fixed_address_name": "FixedAddressMAC", + "fixed_address_comment": "Created From FA MAC", + "ext_attrs": {"gateway": "10.0.0.254"}, + } + inf_ds_ipaddress = self.infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_address_atrs)) + self.infoblox_adapter.add(inf_ds_ipaddress) + + parent_pfx, _ = Prefix.objects.get_or_create( + prefix="10.0.0.0/8", + status=self.status_active, + type="network", + description="TestNetwork", + namespace=self.namespace_dev, + ) + IPAddress.objects.get_or_create( + address="10.0.0.1/8", + status=self.status_active, + type="host", + description="OldDescription", + parent=parent_pfx, + ) + + nb_adapter = NautobotAdapter(config=self.config) + nb_adapter.job = Mock() + nb_adapter.load() + self.infoblox_adapter.sync_to(nb_adapter) + + ipaddress = IPAddress.objects.get(address="10.0.0.1/8", parent__namespace__name="dev") + + self.assertEqual("10.0.0.1/8", str(ipaddress.address)) + self.assertEqual("dev", ipaddress.parent.namespace.name) + self.assertEqual("Active", ipaddress.status.name) + self.assertEqual("FixedAddressMAC", ipaddress.description) + self.assertEqual("dhcp", ipaddress.type) + self.assertEqual("Created From FA MAC", ipaddress.custom_field_data.get("fixed_address_comment")) + self.assertEqual("10.0.0.254", ipaddress.custom_field_data.get("gateway")) + + def test_ip_address_update_address_from_fixed_address_mac(self): + 
"""Validate ip address gets created from Infoblox fixed address with mac address.""" + inf_network_atrs = {"network_type": "network", "namespace": "dev"} + inf_ds_network = self.infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs)) + self.infoblox_adapter.add(inf_ds_network) + inf_address_atrs = { + "ip_addr_type": "dhcp", + "has_fixed_address": True, + "mac_address": "52:1f:83:d4:9a:2e", + "fixed_address_name": "FixedAddressMAC", + "fixed_address_comment": "Created From FA MAC", + "ext_attrs": {"gateway": "10.0.0.254"}, + } + inf_ds_ipaddress = self.infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_address_atrs)) + self.infoblox_adapter.add(inf_ds_ipaddress) + + parent_pfx, _ = Prefix.objects.get_or_create( + prefix="10.0.0.0/8", + status=self.status_active, + type="network", + description="TestNetwork", + namespace=self.namespace_dev, + ) + IPAddress.objects.get_or_create( + address="10.0.0.1/8", + status=self.status_active, + type="host", + parent=parent_pfx, + defaults={ + "description": "OldDescription", + "_custom_field_data": {"mac_address": "52:1f:83:d4:9a:2a"}, + }, + ) + nb_adapter = NautobotAdapter(config=self.config) + nb_adapter.job = Mock() + nb_adapter.load() + self.infoblox_adapter.sync_to(nb_adapter) + + ipaddress = IPAddress.objects.get(address="10.0.0.1/8", parent__namespace__name="dev") + + self.assertEqual("10.0.0.1/8", str(ipaddress.address)) + self.assertEqual("dev", ipaddress.parent.namespace.name) + self.assertEqual("Active", ipaddress.status.name) + self.assertEqual("FixedAddressMAC", ipaddress.description) + self.assertEqual("dhcp", ipaddress.type) + self.assertEqual("52:1f:83:d4:9a:2e", ipaddress.custom_field_data.get("mac_address")) + self.assertEqual("Created From FA MAC", ipaddress.custom_field_data.get("fixed_address_comment")) + + def test_ip_address_update_address_from_dns_record(self): + """Validate ip address gets created from Infoblox DNS record.""" + inf_network_atrs = {"network_type": "network", "namespace": "dev"} + inf_ds_network = self.infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs)) + self.infoblox_adapter.add(inf_ds_network) + inf_address_atrs = { + "ip_addr_type": "host", + "has_a_record": True, + "dns_name": "server1.nautobot.local.net", + "description": "Server1", + } + inf_ds_ipaddress = self.infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_address_atrs)) + self.infoblox_adapter.add(inf_ds_ipaddress) + + parent_pfx, _ = Prefix.objects.get_or_create( + prefix="10.0.0.0/8", + status=self.status_active, + type="network", + description="TestNetwork", + namespace=self.namespace_dev, + ) + IPAddress.objects.get_or_create( + address="10.0.0.1/8", + status=self.status_active, + type="host", + parent=parent_pfx, + defaults={ + "dns_name": "server.nautobot.local.net", + "description": "OldDescription", + "_custom_field_data": {"mac_address": "52:1f:83:d4:9a:2a"}, + }, + ) + nb_adapter = NautobotAdapter(config=self.config) + nb_adapter.job = Mock() + nb_adapter.load() + self.infoblox_adapter.sync_to(nb_adapter) + + ipaddress = IPAddress.objects.get(address="10.0.0.1/8", parent__namespace__name="dev") + + self.assertEqual("10.0.0.1/8", str(ipaddress.address)) + self.assertEqual("dev", ipaddress.parent.namespace.name) + self.assertEqual("Active", ipaddress.status.name) + self.assertEqual("Server1", ipaddress.description) + self.assertEqual("server1.nautobot.local.net", ipaddress.dns_name) + self.assertEqual("host", ipaddress.type) From a94c57ad99f69936808734e5b6f48a785d21ba29 Mon Sep 17 00:00:00 2001 From: Przemek Rogala 
Date: Mon, 10 Jun 2024 12:51:29 +0100 Subject: [PATCH 103/229] Fix fixed address logic. Linting. --- .../infoblox/diffsync/adapters/infoblox.py | 47 ++++++++++++++----- 1 file changed, 34 insertions(+), 13 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py index a02518b59..ce819b267 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py @@ -15,7 +15,7 @@ InfobloxVLAN, InfobloxVLANView, ) -from nautobot_ssot.integrations.infoblox.utils.client import get_default_ext_attrs, get_dns_name +from nautobot_ssot.integrations.infoblox.utils.client import get_default_ext_attrs from nautobot_ssot.integrations.infoblox.utils.diffsync import ( build_vlan_map, get_ext_attr_dict, @@ -197,7 +197,7 @@ def load_prefixes(self, include_ipv4: bool, include_ipv6: bool, sync_filters: li except ObjectAlreadyExists: self.job.logger.warning(f"Duplicate prefix found: {new_pf}.") - def load_ipaddresses(self): # pylint: disable=too-many-branches + def load_ipaddresses(self): # pylint: disable=too-many-branches,too-many-locals,too-many-statements """Load InfobloxIPAddress DiffSync model.""" if self.job.debug: self.job.logger.debug("Loading IP addresses from Infoblox.") @@ -212,17 +212,15 @@ def load_ipaddresses(self): # pylint: disable=too-many-branches _, prefix_length = _ip["network"].split("/") network_view = _ip["network_view"] dns_name = "" - fallback_dns_name = "" # Record can have multiple names, if there is a DNS record attached we should use that name - # Otherwise return non-DNS name for dns_name_candidate in _ip["names"]: - if validate_dns_name(infoblox_client=self.conn, dns_name=dns_name_candidate, network_view=network_view): - dns_name = dns_name_candidate - break - if not fallback_dns_name: - fallback_dns_name = get_dns_name(possible_fqdn=dns_name_candidate) + if not validate_dns_name( + infoblox_client=self.conn, dns_name=dns_name_candidate, network_view=network_view + ): + continue + dns_name = dns_name_candidate + break - dns_name = dns_name or fallback_dns_name namespace = map_network_view_to_namespace(value=network_view, direction="nv_to_ns") ip_ext_attrs = get_ext_attr_dict(extattrs=_ip.get("extattrs", {}), excluded_attrs=self.excluded_attrs) @@ -234,23 +232,38 @@ def load_ipaddresses(self): # pylint: disable=too-many-branches dns_name=dns_name, status=self.conn.get_ipaddr_status(_ip), ip_addr_type=self.conn.get_ipaddr_type(_ip), - description=_ip["comment"], ext_attrs={**default_ext_attrs, **ip_ext_attrs}, - mac_address=None if not _ip["mac_address"] else _ip["mac_address"], + mac_address="" if not _ip["mac_address"] else _ip["mac_address"], + fixed_address_name="", + fixed_address_comment="", ) - # Record references to DNS Records linked to this IP Address + # Record references to DNS Records linked to this IP Address. + # Field `comment` in IP Address records can come from linked fixed address or DNS record. + # We add extra logic to tell DNS record and fixed address comments apart. + # NOTE: We are assuming that Host/A/PTR comments are the same. + # If they're not, the first one found will be treated as the correct one. 
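+ # "First one found" means the first ref in _ip["objects"] whose record
+ # carries a non-empty comment; records with empty comments keep the search going.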
+ dns_comment = "" for ref in _ip["objects"]: obj_type = ref.split("/")[0] if obj_type == "record:host": new_ip.has_host_record = True new_ip.host_record_ref = ref + if not dns_comment: + host_record = self.conn.get_host_record_by_ref(ref) + dns_comment = host_record.get("comment", "") elif obj_type == "record:a": new_ip.has_a_record = True new_ip.a_record_ref = ref + if not dns_comment: + a_record = self.conn.get_a_record_by_ref(ref) + dns_comment = a_record.get("comment", "") elif obj_type == "record:ptr": new_ip.has_ptr_record = True new_ip.ptr_record_ref = ref + if not dns_comment: + ptr_record = self.conn.get_ptr_record_by_ref(ref) + dns_comment = ptr_record.get("comment", "") # We currently only support RESERVED and MAC_ADDRESS types for fixed address elif obj_type == "fixedaddress": if "RESERVATION" in _ip["types"]: @@ -262,6 +275,14 @@ def load_ipaddresses(self): # pylint: disable=too-many-branches new_ip.has_fixed_address = True new_ip.fixed_address_ref = ref + new_ip.description = dns_comment + + # Fixed address name and comment values can differ from the DNS name and comment retrieved from the `names` array on the IP Address record. + if new_ip.has_fixed_address: + fixed_address = self.conn.get_fixed_address_by_ref(new_ip.fixed_address_ref) + new_ip.fixed_address_name = fixed_address.get("name", "") + new_ip.fixed_address_comment = fixed_address.get("comment", "") + self.add(new_ip) def load_vlanviews(self): From 1ebe5a085c8d349b7118dd4653c388b1e6927374 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 10 Jun 2024 12:52:24 +0100 Subject: [PATCH 104/229] Fix fixed address logic. --- .../infoblox/diffsync/adapters/nautobot.py | 25 ++++++++++++++++--- 1 file changed, 21 insertions(+), 4 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py index 9fe47181d..7bf3c203a 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py @@ -290,7 +290,9 @@ def _load_all_ipaddresses_filtered(self, sync_filters: list, include_ipv4: bool, return all_ipaddresses - def load_ipaddresses(self, include_ipv4: bool, include_ipv6: bool, sync_filters: list): + def load_ipaddresses( + self, include_ipv4: bool, include_ipv6: bool, sync_filters: list + ): # pylint: disable=too-many-branches """Load IP Addresses from Nautobot. Args: @@ -327,13 +329,22 @@ def load_ipaddresses(self, include_ipv4: bool, include_ipv6: bool, sync_filters: # Infoblox fixed address records are of type DHCP. Only Nautobot IP addresses of type DHCP will trigger fixed address creation logic. has_fixed_address = False - mac_address = ipaddr.custom_field_data.get("mac_address") + mac_address = ipaddr.custom_field_data.get("mac_address") or "" if ipaddr.type == IPAddressTypeChoices.TYPE_DHCP: if self.config.fixed_address_type == FixedAddressTypeChoices.MAC_ADDRESS and mac_address: has_fixed_address = True elif self.config.fixed_address_type == FixedAddressTypeChoices.RESERVED: has_fixed_address = True + # Description translates to comment for DNS records only. + # If we don't have DNS name, or we don't create DNS records, then we set description to an empty string. 
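+ # Net effect: ipaddr.description is only carried over when DNS record
+ # creation is enabled and the IP address actually has a DNS name.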
+ if self.config.dns_record_type == DNSRecordTypeChoices.DONT_CREATE_RECORD: + description = "" + elif self.config.dns_record_type != DNSRecordTypeChoices.DONT_CREATE_RECORD and not ipaddr.dns_name: + description = "" + else: + description = ipaddr.description + custom_fields = get_valid_custom_fields(ipaddr.custom_field_data, excluded_cfs=self.excluded_cfs) _ip = self.ipaddress( address=addr, @@ -343,14 +354,20 @@ def load_ipaddresses(self, include_ipv4: bool, include_ipv6: bool, sync_filters: ip_addr_type=ipaddr.type, prefix_length=prefix.prefix_length if prefix else ipaddr.prefix_length, dns_name=ipaddr.dns_name, - description=ipaddr.description, + description=description, ext_attrs={**default_cfs, **custom_fields}, mac_address=mac_address, pk=ipaddr.id, has_fixed_address=has_fixed_address, + # Fixed address name comes from Nautobot's IP Address `description` + fixed_address_name=ipaddr.description if has_fixed_address else "", + # Only set fixed address comment if we create fixed addresses. + fixed_address_comment=( + ipaddr.custom_field_data.get("fixed_address_comment") or "" if has_fixed_address else "" + ), ) - # Pretend IP Address has matching DNS records if dns name is defined. + # Pretend IP Address has matching DNS records if `dns_name` is defined. # This will be compared against values set on Infoblox side. if ipaddr.dns_name: if self.config.dns_record_type == DNSRecordTypeChoices.HOST_RECORD: From 5710357a954f41cb6e6983b12d3bd8fce00f1d44 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 10 Jun 2024 12:52:48 +0100 Subject: [PATCH 105/229] Additional attributes to support fixed address. --- nautobot_ssot/integrations/infoblox/diffsync/models/base.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/base.py b/nautobot_ssot/integrations/infoblox/diffsync/models/base.py index 76bb9113b..b67c53024 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/base.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/base.py @@ -79,6 +79,8 @@ class IPAddress(DiffSyncModel): "has_ptr_record", "has_fixed_address", "mac_address", + "fixed_address_name", + "fixed_address_comment", ) address: str @@ -95,6 +97,8 @@ class IPAddress(DiffSyncModel): has_ptr_record: bool = False has_fixed_address: bool = False mac_address: Optional[str] + fixed_address_name: Optional[str] + fixed_address_comment: Optional[str] pk: Optional[uuid.UUID] = None a_record_ref: Optional[str] = None From 932da5dede2b01681b69e8b234f1968f1bd60636 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 10 Jun 2024 13:16:14 +0100 Subject: [PATCH 106/229] Fixes in network and ip fixed address logic. Refactor. 
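NOTE: The patch below consistently translates the Nautobot namespace stored in the model identifiers into an Infoblox network view before calling the client (direction="ns_to_nv"), where the earlier code passed the namespace through unchanged. A minimal sketch of the mapping convention this relies on, assuming the default <-> Global pairing used elsewhere in this integration; the real helper lives in nautobot_ssot.integrations.infoblox.utils.diffsync:

    def map_network_view_to_namespace(value: str, direction: str) -> str:
        """Map between an Infoblox network view and a Nautobot namespace."""
        # Infoblox's "default" network view corresponds to Nautobot's "Global"
        # namespace; all other names pass through unchanged.
        mapping = {
            "nv_to_ns": {"default": "Global"},
            "ns_to_nv": {"Global": "default"},
        }
        return mapping[direction].get(value, value)

    # As used in InfobloxNetwork.create()/update() below:
    # network_view = map_network_view_to_namespace(value=ids["namespace"], direction="ns_to_nv")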
--- .../infoblox/diffsync/models/infoblox.py | 480 +++++++++++------- 1 file changed, 291 insertions(+), 189 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py index d874fe1a5..adda8046f 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py @@ -13,11 +13,11 @@ class InfobloxNetwork(Network): @classmethod def create(cls, diffsync, ids, attrs): """Create Network object in Infoblox.""" - status = attrs.get("status") + network_type = attrs.get("network_type") network = ids["network"] - network_view = ids["namespace"] + network_view = map_network_view_to_namespace(value=ids["namespace"], direction="ns_to_nv") try: - if status != "container": + if network_type != "container": diffsync.conn.create_network( prefix=network, comment=attrs.get("description", ""), network_view=network_view ) @@ -46,9 +46,10 @@ def create(cls, diffsync, ids, attrs): def update(self, attrs): """Update Network object in Infoblox.""" + network_view = map_network_view_to_namespace(value=self.get_identifiers()["namespace"], direction="ns_to_nv") self.diffsync.conn.update_network( prefix=self.get_identifiers()["network"], - network_view=self.get_identifiers()["namespace"], + network_view=network_view, comment=attrs.get("description", ""), ) if attrs.get("ranges"): @@ -79,47 +80,57 @@ def create(cls, diffsync, ids, attrs): DNS record creation requires the IP Address to have a DNS name """ network_view = map_network_view_to_namespace(value=ids["namespace"], direction="ns_to_nv") - dns_name = attrs.get("dns_name") ip_address = ids["address"] - name = attrs.get("description") mac_address = attrs.get("mac_address") + has_fixed_address = attrs.get("has_fixed_address", False) + fixed_address_name = attrs.get("fixed_address_name") or "" + fixed_address_comment = attrs.get("fixed_address_comment") or "" - # Used DNS name for fixed address name if it exists. Otherwise use description. 
- if dns_name: - fixed_address_name = dns_name - else: - fixed_address_name = name - - if diffsync.config.fixed_address_type == FixedAddressTypeChoices.RESERVED: + if diffsync.config.fixed_address_type == FixedAddressTypeChoices.RESERVED and has_fixed_address: diffsync.conn.create_fixed_address( - ip_address=ip_address, name=fixed_address_name, match_client="RESERVED", network_view=network_view - ) - diffsync.job.logger.debug( - "Created fixed address reservation, address: %s, name: %s, network_view: %s", - ip_address, - fixed_address_name, - network_view, + ip_address=ip_address, + name=fixed_address_name, + comment=fixed_address_comment, + match_client="RESERVED", + network_view=network_view, ) - elif diffsync.config.fixed_address_type == FixedAddressTypeChoices.MAC_ADDRESS and mac_address: + if diffsync.job.debug: + diffsync.job.logger.debug( + "Created fixed address reservation, address: %s, name: %s, network_view: %s, comment: %s", + ip_address, + fixed_address_name, + network_view, + fixed_address_comment, + ) + elif ( + diffsync.config.fixed_address_type == FixedAddressTypeChoices.MAC_ADDRESS + and mac_address + and has_fixed_address + ): diffsync.conn.create_fixed_address( ip_address=ip_address, name=fixed_address_name, mac_address=mac_address, match_client="MAC_ADDRESS", + comment=fixed_address_comment, network_view=network_view, ) - diffsync.job.logger.debug( - "Created fixed address with MAC, address: %s, name: %s, mac address: %s, network_view: %s", - ip_address, - fixed_address_name, - mac_address, - network_view, - ) + if diffsync.job.debug: + diffsync.job.logger.debug( + "Created fixed address with MAC, address: %s, name: %s, mac address: %s, network_view: %s, comment: %s", + ip_address, + fixed_address_name, + mac_address, + network_view, + fixed_address_comment, + ) # DNS record not needed, we can return if diffsync.config.dns_record_type == DNSRecordTypeChoices.DONT_CREATE_RECORD: return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + dns_name = attrs.get("dns_name") + dns_comment = attrs.get("description") if not dns_name: diffsync.job.logger.warning( f"Cannot create Infoblox DNS record for IP Address {ip_address}. DNS name is not defined." 
@@ -132,94 +143,47 @@ def create(cls, diffsync, ids, attrs): return super().create(ids=ids, diffsync=diffsync, attrs=attrs) if diffsync.config.dns_record_type == DNSRecordTypeChoices.A_RECORD: - diffsync.conn.create_a_record(dns_name, ip_address, network_view=network_view) - diffsync.job.logger.debug( - "Created DNS A record, address: %s, dns_name: %s, network_view: %s", - ip_address, - dns_name, - network_view, - ) + diffsync.conn.create_a_record(dns_name, ip_address, dns_comment, network_view=network_view) + if diffsync.job.debug: + diffsync.job.logger.debug( + "Created DNS A record, address: %s, dns_name: %s, network_view: %s, comment: %s", + ip_address, + dns_name, + network_view, + dns_comment, + ) elif diffsync.config.dns_record_type == DNSRecordTypeChoices.A_AND_PTR_RECORD: - diffsync.conn.create_a_record(dns_name, ip_address, network_view=network_view) - diffsync.job.logger.debug( - "Created DNS A record, address: %s, dns_name: %s, network_view: %s", - ip_address, - dns_name, - network_view, - ) - diffsync.conn.create_ptr_record(dns_name, ip_address, network_view=network_view) - diffsync.job.logger.debug( - "Created DNS PTR record, address: %s, dns_name: %s, network_view: %s", - ip_address, - dns_name, - network_view, - ) + diffsync.conn.create_a_record(dns_name, ip_address, dns_comment, network_view=network_view) + if diffsync.job.debug: + diffsync.job.logger.debug( + "Created DNS A record, address: %s, dns_name: %s, network_view: %s, comment: %s", + ip_address, + dns_name, + network_view, + dns_comment, + ) + diffsync.conn.create_ptr_record(dns_name, ip_address, dns_comment, network_view=network_view) + if diffsync.job.debug: + diffsync.job.logger.debug( + "Created DNS PTR record, address: %s, dns_name: %s, network_view: %s, comment: %s", + ip_address, + dns_name, + network_view, + dns_comment, + ) elif diffsync.config.dns_record_type == DNSRecordTypeChoices.HOST_RECORD: - diffsync.conn.create_host_record(dns_name, ip_address, network_view=network_view) - diffsync.job.logger.debug( - "Created DNS Host record, address: %s, dns_name: %s, network_view: %s", - ip_address, - dns_name, - network_view, - ) + diffsync.conn.create_host_record(dns_name, ip_address, dns_comment, network_view=network_view) + if diffsync.job.debug: + diffsync.job.logger.debug( + "Created DNS Host record, address: %s, dns_name: %s, network_view: %s, comment: %s", + ip_address, + dns_name, + network_view, + dns_comment, + ) return super().create(ids=ids, diffsync=diffsync, attrs=attrs) - def _update_fixed_address(self, new_attrs: dict, inf_attrs: dict, ip_address: str, network_view: str) -> None: - """Updates fixed address record in Infoblox. - - Args: - new_attrs: Object attributes changed in Nautobot - inf_attrs: Infoblox object attributes - ip_address: IP address of the fixed address - network_view: Network View of the fixed address - """ - new_dns_name = new_attrs.get("dns_name") - new_description = new_attrs.get("description") - mac_address = new_attrs.get("mac_address") - - fa_update_data = {} - # Fixed Address name uses DNS Name if it's defined, then description. - if new_dns_name: - fa_update_data["name"] = new_dns_name - # DNS name cleared on Nautobot side - if new_dns_name == "": - # Description updated on Nautobot side - if new_description: - fa_update_data["name"] = new_description - # Nautobot description not updated. Copy Infoblox description over to the name attribute. 
- elif inf_attrs.get("description"): - fa_update_data["name"] = inf_attrs.get("description") - if new_description: - fa_update_data["comment"] = new_description - - if ( - self.diffsync.config.fixed_address_type == FixedAddressTypeChoices.RESERVED - and self.fixed_address_type == "RESERVED" - and fa_update_data - ): - self.diffsync.conn.update_fixed_address(ref=self.fixed_address_ref, data=fa_update_data) - self.diffsync.job.logger.debug( - "Updated fixed address reservation, address: %s, network_view: %s, update data: %s", - ip_address, - network_view, - fa_update_data, - ) - elif ( - self.diffsync.config.fixed_address_type == FixedAddressTypeChoices.MAC_ADDRESS - and self.fixed_address_type == "MAC_ADDRESS" - and (fa_update_data or mac_address) - ): - if mac_address: - fa_update_data["mac"] = mac_address - self.diffsync.conn.update_fixed_address(ref=self.fixed_address_ref, data=fa_update_data) - self.diffsync.job.logger.debug( - "Updated fixed address with MAC, address: %s, network_view: %s, update data: %s", - ip_address, - network_view, - fa_update_data, - ) - - def _check_for_incompatible_record_types(self, attrs: dict, inf_attrs: dict, ip_address: str): + def _ip_update_check_for_incompatible_record_types(self, attrs: dict, inf_attrs: dict, ip_address: str): """Checks whether requested changes to the DNS records are compatible with existing Infoblox DNS objects. Args: @@ -270,53 +234,123 @@ def _check_for_incompatible_record_types(self, attrs: dict, inf_attrs: dict, ip_ return incompatible_record_types, incomp_msg - def update(self, attrs): # pylint: disable=too-many-branches,too-many-locals,too-many-statements - """Update IP Address object in Infoblox.""" - ids = self.get_identifiers() - inf_attrs = self.get_attrs() - ip_address = ids["address"] - network_view = map_network_view_to_namespace(value=ids["namespace"], direction="ns_to_nv") + def _ip_update_update_fixed_address(self, new_attrs: dict, ip_address: str, network_view: str) -> None: + """Updates fixed address record in Infoblox. Triggered by IP Address update. - # Update fixed address - if inf_attrs.get("has_fixed_address"): - self._update_fixed_address( - new_attrs=attrs, inf_attrs=inf_attrs, ip_address=ip_address, network_view=network_view - ) + Args: + new_attrs: Object attributes changed in Nautobot + ip_address: IP address of the fixed address + network_view: Network View of the fixed address + """ + mac_address = new_attrs.get("mac_address") - # DNS record not needed, we can return - if self.diffsync.config.dns_record_type == DNSRecordTypeChoices.DONT_CREATE_RECORD: - return super().update(attrs) + fa_update_data = {} + if "fixed_address_name" in new_attrs: + fa_update_data["name"] = new_attrs.get("fixed_address_name") or "" + if "fixed_address_comment" in new_attrs: + fa_update_data["comment"] = new_attrs.get("fixed_address_comment") or "" - # Nautobot side doesn't check if dns name is a fqdn. Additionally, Infoblox won't allow dns name if the zone fqdn doesn't exist. - # We get either existing DNS name, or a new one. This is because name might be the same but we need to create a PTR record. - canonical_dns_name = attrs.get("dns_name", inf_attrs["dns_name"]) - if not canonical_dns_name: - self.diffsync.job.logger.warning( - f"Cannot update Infoblox record for IP Address {ip_address}. DNS name is not defined." 
+ if ( + self.diffsync.config.fixed_address_type == FixedAddressTypeChoices.RESERVED + and self.fixed_address_type == "RESERVED" + and fa_update_data + ): + self.diffsync.conn.update_fixed_address(ref=self.fixed_address_ref, data=fa_update_data) + if self.diffsync.job.debug: + self.diffsync.job.logger.debug( + "Updated fixed address reservation, address: %s, network_view: %s, update data: %s", + ip_address, + network_view, + fa_update_data, + extra={"grouping": "update"}, + ) + elif ( + self.diffsync.config.fixed_address_type == FixedAddressTypeChoices.MAC_ADDRESS + and self.fixed_address_type == "MAC_ADDRESS" + and (fa_update_data or mac_address) + ): + if mac_address: + fa_update_data["mac"] = mac_address + self.diffsync.conn.update_fixed_address(ref=self.fixed_address_ref, data=fa_update_data) + if self.diffsync.job.debug: + self.diffsync.job.logger.debug( + "Updated fixed address with MAC, address: %s, network_view: %s, update data: %s", + ip_address, + network_view, + fa_update_data, + extra={"grouping": "update"}, + ) + + def _ip_update_create_fixed_address(self, new_attrs: dict, ip_address: str, network_view: str) -> None: + """Creates fixed address record in Infoblox. Triggered by IP Address update. + + Args: + new_attrs: Object attributes changed in Nautobot + ip_address: IP address of the fixed address + network_view: Network View of the fixed address + """ + mac_address = new_attrs.get("mac_address") + fixed_address_name = new_attrs.get("fixed_address_name") or "" + fixed_address_comment = new_attrs.get("fixed_address_comment") or "" + + if self.diffsync.config.fixed_address_type == FixedAddressTypeChoices.RESERVED: + self.diffsync.conn.create_fixed_address( + ip_address=ip_address, + name=fixed_address_name, + comment=fixed_address_comment, + match_client="RESERVED", + network_view=network_view, ) - return super().update(attrs) - if not validate_dns_name(self.diffsync.conn, canonical_dns_name, network_view): - self.diffsync.job.logger.warning( - f"Invalid zone fqdn in DNS name `{canonical_dns_name}` for IP Address {ip_address}" + if self.diffsync.job.debug: + self.diffsync.job.logger.debug( + "Created fixed address reservation, address: %s, name: %s, network_view: %s, comment: %s", + ip_address, + fixed_address_name, + network_view, + fixed_address_comment, + extra={"grouping": "update"}, + ) + elif self.diffsync.config.fixed_address_type == FixedAddressTypeChoices.MAC_ADDRESS and mac_address: + self.diffsync.conn.create_fixed_address( + ip_address=ip_address, + name=fixed_address_name, + mac_address=mac_address, + comment=fixed_address_comment, + match_client="MAC_ADDRESS", + network_view=network_view, ) - return super().update(attrs) + if self.diffsync.job.debug: + self.diffsync.job.logger.debug( + "Created fixed address with MAC, address: %s, name: %s, mac address: %s, network_view: %s, comment: %s", + ip_address, + fixed_address_name, + mac_address, + network_view, + fixed_address_comment, + extra={"grouping": "update"}, + ) - incompatible_record_types, incomp_msg = self._check_for_incompatible_record_types( - attrs=attrs, inf_attrs=inf_attrs, ip_address=ip_address - ) - if incompatible_record_types: - self.diffsync.job.logger.warning(incomp_msg) - return super().update(attrs) + def _ip_update_create_or_update_dns_records( # pylint: disable=too-many-arguments + self, new_attrs: dict, inf_attrs: dict, canonical_dns_name: str, ip_address: str, network_view: str + ) -> None: + """Creates or update DNS records connected to the IP address. Triggered by IP Address update. 
+ Args: + new_attrs: Object attributes changed in Nautobot + inf_attrs: Infoblox object attributes + canonical_dns_name: DNS name used for create operations only + ip_address: IP address for which DNS records are created + network_view: Network View of the fixed address + """ dns_payload = {} ptr_payload = {} - new_description = attrs.get("description") - if new_description: - dns_payload.update({"comment": new_description}) - ptr_payload.update({"comment": new_description}) - if attrs.get("dns_name"): - dns_payload.update({"name": attrs.get("dns_name")}) - ptr_payload.update({"ptrdname": attrs.get("dns_name")}) + dns_comment = new_attrs.get("description") + if dns_comment: + dns_payload["comment"] = dns_comment + ptr_payload["comment"] = dns_comment + if new_attrs.get("dns_name"): + dns_payload["name"] = new_attrs.get("dns_name") + ptr_payload["ptrdname"] = new_attrs.get("dns_name") a_record_action = ptr_record_action = host_record_action = "none" if self.diffsync.config.dns_record_type == DNSRecordTypeChoices.A_RECORD: @@ -328,57 +362,125 @@ def update(self, attrs): # pylint: disable=too-many-branches,too-many-locals,to host_record_action = "update" if inf_attrs["has_host_record"] else "create" # IP Address in Infoblox is not a plain IP Address like in Nautobot. - # In Infoblox we can have Fixed Address, Host record for IP Address, or A Record for IP Address. + # In Infoblox we can have one of many types of Fixed Address, Host record for IP Address, or A Record, with optional PTR, for IP Address. # When syncing from Nautobot to Infoblox we take IP Address and check if it has dns_name field populated. # We then combine this with the Infoblox Config toggles to arrive at the desired state in Infoblox. + comment = dns_comment or inf_attrs.get("description") if host_record_action == "update" and dns_payload: self.diffsync.conn.update_host_record(ref=self.host_record_ref, data=dns_payload) - self.diffsync.job.logger.debug( - "Updated Host record, address: %s, network_view: %s, update data: %s", - ip_address, - network_view, - dns_payload, - ) + if self.diffsync.job.debug: + self.diffsync.job.logger.debug( + "Updated Host record, address: %s, network_view: %s, update data: %s", + ip_address, + network_view, + dns_payload, + extra={"grouping": "update"}, + ) elif host_record_action == "create": - self.diffsync.conn.create_host_record(canonical_dns_name, ip_address, network_view=network_view) - self.diffsync.job.logger.debug( - "Created Host record, address: %s, network_view: %s, DNS name: %s", - ip_address, - network_view, - canonical_dns_name, - ) + self.diffsync.conn.create_host_record(canonical_dns_name, ip_address, comment, network_view=network_view) + if self.diffsync.job.debug: + self.diffsync.job.logger.debug( + "Created Host record, address: %s, network_view: %s, DNS name: %s, comment: %s", + ip_address, + network_view, + canonical_dns_name, + comment, + extra={"grouping": "update"}, + ) if a_record_action == "update" and dns_payload: self.diffsync.conn.update_a_record(ref=self.a_record_ref, data=dns_payload) - self.diffsync.job.logger.debug( - "Updated A record, address: %s, network_view: %s, update data: %s", - ip_address, - network_view, - dns_payload, - ) + if self.diffsync.job.debug: + self.diffsync.job.logger.debug( + "Updated A record, address: %s, network_view: %s, update data: %s", + ip_address, + network_view, + dns_payload, + extra={"grouping": "update"}, + ) elif a_record_action == "create": - self.diffsync.conn.create_a_record(canonical_dns_name, ip_address, 
network_view=network_view) - self.diffsync.job.logger.debug( - "Created A record, address: %s, network_view: %s, DNS name: %s", - ip_address, - network_view, - canonical_dns_name, - ) + self.diffsync.conn.create_a_record(canonical_dns_name, ip_address, comment, network_view=network_view) + if self.diffsync.job.debug: + self.diffsync.job.logger.debug( + "Created A record, address: %s, network_view: %s, DNS name: %s, comment: %s", + ip_address, + network_view, + canonical_dns_name, + comment, + extra={"grouping": "update"}, + ) if ptr_record_action == "update" and ptr_payload: self.diffsync.conn.update_ptr_record(ref=self.ptr_record_ref, data=ptr_payload) - self.diffsync.job.logger.debug( - "Updated PTR record, address: %s, network_view: %s, update data: %s", - ip_address, - network_view, - ptr_payload, - ) + if self.diffsync.job.debug: + self.diffsync.job.logger.debug( + "Updated PTR record, address: %s, network_view: %s, update data: %s", + ip_address, + network_view, + ptr_payload, + extra={"grouping": "update"}, + ) elif ptr_record_action == "create": - self.diffsync.conn.create_ptr_record(canonical_dns_name, ip_address, network_view=network_view) - self.diffsync.job.logger.debug( - "Created PTR record, address: %s, network_view: %s, DNS name: %s", - ip_address, - network_view, - canonical_dns_name, + self.diffsync.conn.create_ptr_record(canonical_dns_name, ip_address, comment, network_view=network_view) + if self.diffsync.job.debug: + self.diffsync.job.logger.debug( + "Created PTR record, address: %s, network_view: %s, DNS name: %s, comment: %s", + ip_address, + network_view, + canonical_dns_name, + comment, + extra={"grouping": "update"}, + ) + + def update(self, attrs): + """Update IP Address object in Infoblox.""" + ids = self.get_identifiers() + inf_attrs = self.get_attrs() + ip_address = ids["address"] + network_view = map_network_view_to_namespace(value=ids["namespace"], direction="ns_to_nv") + + # Attempt update of a fixed address if Infoblox has one already + if inf_attrs.get("has_fixed_address"): + self._ip_update_update_fixed_address(new_attrs=attrs, ip_address=ip_address, network_view=network_view) + # IP Address exists in Infoblox without Fixed Address object. Nautobot side is asking for Fixed Address so we need to create one. + elif ( + attrs.get("has_fixed_address") + and self.diffsync.config.fixed_address_type != FixedAddressTypeChoices.DONT_CREATE_RECORD + ): + self._ip_update_create_fixed_address(new_attrs=attrs, ip_address=ip_address, network_view=network_view) + + # DNS record not needed, we can return + if self.diffsync.config.dns_record_type == DNSRecordTypeChoices.DONT_CREATE_RECORD: + return super().update(attrs) + + # Nautobot side doesn't check if dns name is a fqdn. Additionally, Infoblox won't allow dns name if the zone fqdn doesn't exist. + # We get either existing DNS name, or a new one. This is because name might be the same but we might need to create a new DNS record. + canonical_dns_name = attrs.get("dns_name", inf_attrs["dns_name"]) + if not canonical_dns_name: + self.diffsync.job.logger.info( + f"Skipping DNS Infoblox record create/update for IP Address {ip_address}. DNS name is not defined." 
+ ) + return super().update(attrs) + + if not validate_dns_name(self.diffsync.conn, canonical_dns_name, network_view): + self.diffsync.job.logger.warning( + f"Invalid zone fqdn in DNS name `{canonical_dns_name}` for IP Address {ip_address}" ) + return super().update(attrs) + + incompatible_record_types, incomp_msg = self._ip_update_check_for_incompatible_record_types( + attrs=attrs, inf_attrs=inf_attrs, ip_address=ip_address + ) + if incompatible_record_types: + self.diffsync.job.logger.warning(incomp_msg) + return super().update(attrs) + + self._ip_update_create_or_update_dns_records( + new_attrs=attrs, + inf_attrs=inf_attrs, + canonical_dns_name=canonical_dns_name, + ip_address=ip_address, + network_view=network_view, + ) + return super().update(attrs) From e8ad0e9e9de28be20a37998ba58f49b3e39a9670 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 10 Jun 2024 13:16:50 +0100 Subject: [PATCH 107/229] Fixes to IP Address creation/update logic. --- .../infoblox/diffsync/models/nautobot.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py b/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py index 5fc3e80f0..4380fa320 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py @@ -225,12 +225,20 @@ def create(cls, diffsync, ids, attrs): address=addr, status_id=status, type=ip_addr_type, - description=attrs.get("description", ""), dns_name=attrs.get("dns_name", ""), parent_id=diffsync.prefix_map[(ids["namespace"], ids["prefix"])], ) if attrs.get("ext_attrs"): process_ext_attrs(diffsync=diffsync, obj=_ip, extattrs=attrs["ext_attrs"]) + if "mac_address" in attrs: + _ip.custom_field_data.update({"mac_address": attrs.get("mac_address", "")}) + if attrs.get("has_fixed_address", False) and "fixed_address_comment" in attrs: + _ip.custom_field_data.update({"fixed_address_comment": attrs.get("fixed_address_comment") or ""}) + # Fixed address name takes precedence over DNS comment field, and is recorded in the description field of Nautobot IP Address. + if attrs.get("has_fixed_address", False) and "fixed_address_name" in attrs: + _ip.description = attrs.get("fixed_address_name") or "" + else: + _ip.description = attrs.get("description") or "" try: _ip.tags.add(create_tag_sync_from_infoblox()) _ip.validated_save() @@ -257,12 +265,19 @@ def update(self, attrs): _ipaddr.type = attrs["ip_addr_type"].lower() else: _ipaddr.type = "host" - if attrs.get("description"): + # Fixed Address name takes precedence when filling out `description` field + if attrs.get("fixed_address_name"): + _ipaddr.description = attrs.get("fixed_address_name") or "" + elif attrs.get("description"): _ipaddr.description = attrs["description"] if attrs.get("dns_name"): _ipaddr.dns_name = attrs["dns_name"] if "ext_attrs" in attrs: process_ext_attrs(diffsync=self.diffsync, obj=_ipaddr, extattrs=attrs["ext_attrs"]) + if "mac_address" in attrs: + _ipaddr.custom_field_data.update({"mac_address": attrs.get("mac_address", "")}) + if "fixed_address_comment" in attrs: + _ipaddr.custom_field_data.update({"fixed_address_comment": attrs.get("fixed_address_comment") or ""}) try: _ipaddr.validated_save() return super().update(attrs) From 87fb4bb85ae070828ac474edde746198e886e906 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 10 Jun 2024 13:17:59 +0100 Subject: [PATCH 108/229] Remove unused code. 
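NOTE: The code removed below is the old four-boolean record-creation config (create_ip_reservation, create_host_record, create_a_record, create_ptr_record) plus its cross-field validation, superseded by the dns_record_type and fixed_address_type choice fields used throughout this series. A hedged sketch of how the legacy boolean combinations collapse into the single choice; only "create-host-record" and "do-not-create-record" are confirmed by the migration in patch 116, the other literals are assumptions:

    def booleans_to_dns_record_type(create_host_record: bool, create_a_record: bool, create_ptr_record: bool) -> str:
        """Collapse the legacy booleans into one DNSRecordTypeChoices value."""
        if create_host_record:
            return "create-host-record"
        if create_a_record and create_ptr_record:
            return "create-a-and-ptr-records"  # assumed literal
        if create_a_record:
            return "create-a-record"  # assumed literal
        return "do-not-create-record"

A choice field also makes the previously invalid combinations (Host together with A, PTR without A) unrepresentable, which is why the _clean_ip_address_create_options validation can be deleted outright.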
--- nautobot_ssot/integrations/infoblox/models.py | 61 ++----------------- 1 file changed, 5 insertions(+), 56 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/models.py b/nautobot_ssot/integrations/infoblox/models.py index e13554243..a98d47975 100644 --- a/nautobot_ssot/integrations/infoblox/models.py +++ b/nautobot_ssot/integrations/infoblox/models.py @@ -32,16 +32,16 @@ def _get_default_cf_fields_ignore(): class SSOTInfobloxConfig(PrimaryModel): # pylint: disable=too-many-ancestors """SSOT Infoblox Configuration model.""" - name = models.CharField(max_length=255, unique=True) + name = models.CharField(max_length=CHARFIELD_MAX_LENGTH, unique=True) description = models.CharField( - max_length=255, + max_length=CHARFIELD_MAX_LENGTH, blank=True, ) default_status = models.ForeignKey( to="extras.Status", on_delete=models.PROTECT, verbose_name="Default Object Status", - help_text="Status", + help_text="Default Object Status", ) infoblox_instance = models.ForeignKey( to="extras.ExternalIntegration", @@ -50,7 +50,7 @@ class SSOTInfobloxConfig(PrimaryModel): # pylint: disable=too-many-ancestors help_text="Infoblox Instance", ) infoblox_wapi_version = models.CharField( - max_length=255, + max_length=CHARFIELD_MAX_LENGTH, default="v2.12", verbose_name="Infoblox WAPI version", ) @@ -92,30 +92,10 @@ class SSOTInfobloxConfig(PrimaryModel): # pylint: disable=too-many-ancestors ) fixed_address_type = models.CharField( max_length=CHARFIELD_MAX_LENGTH, - default=FixedAddressTypeChoices.MAC_ADDRESS, + default=FixedAddressTypeChoices.DONT_CREATE_RECORD, choices=FixedAddressTypeChoices, help_text="Choose what type of Infoblox fixed IP address record to create.", ) - create_ip_reservation = models.BooleanField( - default=True, - verbose_name="Create IP Reservation", - help_text="Infoblox - Create IP Address as IP Reservation", - ) - create_host_record = models.BooleanField( - default=False, - verbose_name="Create Host Record", - help_text="Infoblox - Create IP Address Host Record", - ) - create_a_record = models.BooleanField( - default=False, - verbose_name="Create A Record", - help_text="Infoblox - Create IP Address A Record", - ) - create_ptr_record = models.BooleanField( - default=False, - verbose_name="Create PTR Record", - help_text="Infoblox - Create PTR Record for IP Address", - ) job_enabled = models.BooleanField( default=False, verbose_name="Enabled for Sync Job", @@ -248,36 +228,6 @@ def _clean_import_ip(self): } ) - def _clean_ip_address_create_options(self): - """Performs validation of the Infoblox IP Address creation options.""" - if self.create_a_record and self.create_host_record: - raise ValidationError( - { - "create_a_record": "Only one of `create_a_record` or `create_host_record` can be enabled at the same time.", - "create_host_record": "Only one of `create_a_record` or `create_host_record` can be enabled at the same time.", - }, - ) - if self.create_host_record and self.create_ptr_record: - raise ValidationError( - { - "create_host_record": "`create_ptr_record` can be used with `create_a_record` only.", - "create_ptr_record": "`create_ptr_record` can be used with `create_a_record` only.", - }, - ) - - if self.create_ptr_record and not self.create_a_record: - raise ValidationError( - {"create_ptr_record": "To use `create_ptr_record` you must enable `create_a_record`."}, - ) - - if not (self.create_a_record or self.create_host_record): - raise ValidationError( - { - "create_a_record": "Either `create_a_record` or `create_host_record` must be enabled.", - "create_host_record": 
"Either `create_a_record` or `create_host_record` must be enabled.", - }, - ) - def _clean_infoblox_dns_view_mapping(self): """Performs validation of the infoblox_dns_view_mapping field.""" if not isinstance(self.infoblox_dns_view_mapping, dict): @@ -319,6 +269,5 @@ def clean(self): self._clean_infoblox_sync_filters() self._clean_infoblox_instance() self._clean_import_ip() - self._clean_ip_address_create_options() self._clean_infoblox_dns_view_mapping() self._clean_cf_fields_ignore() From 952b28c5e34387504d4e9475afda420b0cf35bdb Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 10 Jun 2024 13:18:43 +0100 Subject: [PATCH 109/229] Add new cfs. Update config defaults. --- .../integrations/infoblox/signals.py | 42 +++++++++++++------ 1 file changed, 30 insertions(+), 12 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/signals.py b/nautobot_ssot/integrations/infoblox/signals.py index fc8c147e5..807777d36 100644 --- a/nautobot_ssot/integrations/infoblox/signals.py +++ b/nautobot_ssot/integrations/infoblox/signals.py @@ -13,6 +13,7 @@ ) from django.conf import settings from nautobot_ssot.integrations.infoblox.constant import TAG_COLOR +from nautobot_ssot.integrations.infoblox.choices import DNSRecordTypeChoices, FixedAddressTypeChoices config = settings.PLUGINS_CONFIG["nautobot_ssot"] @@ -33,6 +34,7 @@ def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disa CustomField = apps.get_model("extras", "CustomField") Prefix = apps.get_model("ipam", "Prefix") IPAddress = apps.get_model("ipam", "IPAddress") + Namespace = apps.get_model("ipam", "Namespace") Tag = apps.get_model("extras", "Tag") Relationship = apps.get_model("extras", "Relationship") ExternalIntegration = apps.get_model("extras", "ExternalIntegration") @@ -52,7 +54,7 @@ def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disa "color": TAG_COLOR, }, ) - for model in [IPAddress, Prefix, VLAN]: + for model in [IPAddress, Namespace, Prefix, VLAN]: tag_sync_from_infoblox.content_types.add(ContentType.objects.get_for_model(model)) tag_sync_to_infoblox, _ = Tag.objects.get_or_create( name="SSoT Synced to Infoblox", @@ -82,6 +84,24 @@ def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disa ) range_custom_field.content_types.add(ContentType.objects.get_for_model(Prefix)) + mac_address_comment_custom_field, _ = CustomField.objects.get_or_create( + type=CustomFieldTypeChoices.TYPE_TEXT, + key="mac_address", + defaults={ + "label": "MAC Address", + }, + ) + mac_address_comment_custom_field.content_types.add(ContentType.objects.get_for_model(IPAddress)) + + fixed_address_comment_custom_field, _ = CustomField.objects.get_or_create( + type=CustomFieldTypeChoices.TYPE_TEXT, + key="fixed_address_comment", + defaults={ + "label": "Fixed Address Comment", + }, + ) + fixed_address_comment_custom_field.content_types.add(ContentType.objects.get_for_model(IPAddress)) + # add Prefix -> VLAN Relationship relationship_dict = { "label": "Prefix -> VLAN", @@ -143,12 +163,12 @@ def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disa ) external_integration, _ = ExternalIntegration.objects.get_or_create( name="MigratedInfobloxInstance", - defaults=dict( # pylint: disable=use-dict-literal - remote_url=str(config.get("infoblox_url", "https://replace.me.local")), - secrets_group=secrets_group, - verify_ssl=bool(config.get("infoblox_verify_ssl", True)), - timeout=infoblox_request_timeout, - ), + defaults={ + "remote_url": str(config.get("infoblox_url", 
"https://replace.me.local")), + "secrets_group": secrets_group, + "verify_ssl": bool(config.get("infoblox_verify_ssl", True)), + "timeout": infoblox_request_timeout, + }, ) SSOTInfobloxConfig.objects.create( @@ -167,11 +187,9 @@ def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disa job_enabled=True, infoblox_sync_filters=infoblox_sync_filters, infoblox_dns_view_mapping={}, - cf_fields_ignore={}, - create_ip_reservation=False, - create_a_record=False, - create_host_record=True, - create_ptr_record=False, + cf_fields_ignore={"extensible_attributes": [], "custom_fields": []}, + fixed_address_type=FixedAddressTypeChoices.DONT_CREATE_RECORD, + dns_record_type=DNSRecordTypeChoices.HOST_RECORD, ) From 71e2b158be84e9d5e820a63c1d8bda063191a70e Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 10 Jun 2024 13:19:47 +0100 Subject: [PATCH 110/229] Add ger by ref methods for host and fixed address. Improve error msg. --- .../integrations/infoblox/utils/client.py | 100 ++++++++++++++++-- 1 file changed, 90 insertions(+), 10 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/utils/client.py b/nautobot_ssot/integrations/infoblox/utils/client.py index 9614093e9..7cdaa23c6 100644 --- a/nautobot_ssot/integrations/infoblox/utils/client.py +++ b/nautobot_ssot/integrations/infoblox/utils/client.py @@ -2,13 +2,13 @@ from __future__ import annotations -from functools import lru_cache import ipaddress import json import logging import re import urllib.parse from collections import defaultdict +from functools import lru_cache from typing import Optional import requests @@ -50,8 +50,8 @@ def get_default_ext_attrs(review_list: list, excluded_attrs: Optional[list] = No excluded_attrs = [] default_ext_attrs = {} for item in review_list: - pf_ext_attrs = get_ext_attr_dict(extattrs=item.get("extattrs", {}), excluded_attrs=excluded_attrs) - for attr in pf_ext_attrs: + normalized_ext_attrs = get_ext_attr_dict(extattrs=item.get("extattrs", {}), excluded_attrs=excluded_attrs) + for attr in normalized_ext_attrs: if attr in excluded_attrs: continue if attr not in default_ext_attrs: @@ -181,13 +181,20 @@ def _request(self, method, path, **kwargs): resp = self.session.request(method, url, timeout=self.timeout, **kwargs) # Infoblox provides meaningful error messages for error codes >= 400 + err_msg = "HTTP error while talking to Infoblox API." if resp.status_code >= 400: try: err_msg = resp.json() except json.decoder.JSONDecodeError: err_msg = resp.text logger.error(err_msg) - resp.raise_for_status() + # Ensure Job logs display error messages retrieved from the Infoblox API response. + # Default error message does not have enough context. + try: + resp.raise_for_status() + except HTTPError as err: + exc_msg = f"{str(err)}. {err_msg}" + raise HTTPError(exc_msg, response=err.response) from err return resp def _delete(self, resource): @@ -672,6 +679,44 @@ def get_host_record_by_ip(self, ip_address, network_view: Optional[str] = None): logger.error(response.text) return response.text + def get_host_record_by_ref(self, ref: str): + """Get the Host record by ref. 
+ + Args: + ref (str): reference to the Host record + + Returns: + (dict) Host record + + Return Response: + { + "_ref": "record:host/ZG5zLmhvc3QkLl9kZWZhdWx0LnRlc3QudGVzdGRldmljZTE:testdevice1.test/default", + "ipv4addrs": [ + { + "_ref": "record:host_ipv4addr/ZG5zLmhvc3RfYWRkcmVzcyQuX2RlZmF1bHQudGVzdC50ZXN0ZGV2aWNlMS4xMC4yMjAuMC4xMDEu:10.220.0.101/testdevice1.test/default", + "configure_for_dhcp": true, + "host": "testdevice1.test", + "ipv4addr": "10.220.0.101", + "mac": "11:11:11:11:11:11" + } + ], + "name": "testdevice1.test", + "view": "default" + } + """ + url_path = f"{ref}" + params = { + "_return_fields": "name,view,ipv4addr,comment", + } + response = self._request("GET", path=url_path, params=params) + logger.error(response.text) + try: + logger.debug(response.json()) + return response.json() + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text + def get_a_record_by_name(self, fqdn, network_view: Optional[str] = None): """Get the A record for a FQDN. @@ -1157,7 +1202,8 @@ def get_all_subnets(self, prefix: str = None, ipv6: bool = False, network_view: except json.decoder.JSONDecodeError: logger.error(response.text) return response.text - # TODO: What does the below code do? We don't return any of this. @progala + # In-place update json_response containing prefixes with DHCP ranges, if found. + # This should be an opt-in if not ipv6: ranges = self.get_all_ranges(prefix=prefix, network_view=network_view) for returned_prefix in json_response: @@ -1206,7 +1252,7 @@ def get_authoritative_zone(self, network_view: Optional[str] = None): logger.error(response.text) return response.text - @lru_cache(maxsize=None) + @lru_cache(maxsize=1024) def get_authoritative_zones_for_dns_view(self, view: str): """Get authoritative zone list for given DNS view. @@ -1313,6 +1359,40 @@ def find_next_available_ip(self, network, network_view: Optional[str] = None): return next_ip_avail + def get_fixed_address_by_ref(self, ref: str): + """Get the Fixed Address object by ref. + + Args: + ref (str): reference to the Fixed Address object + + Returns: + (dict) Fixed Address object + + Return Response: + { + "_ref": "fixedaddress/ZG5zLmZpeGVkX2FkZHJlc3MkMTAuMC4wLjIuMi4u:10.0.0.2/dev", + "extattrs": { + + }, + "mac": "52:1f:83:d4:9a:2e", + "name": "host-fixed1", + "network": "10.0.0.0/24", + "network_view": "dev" + } + """ + url_path = f"{ref}" + params = { + "_return_fields": "mac,network,network_view,comment,extattrs,name", + } + response = self._request("GET", path=url_path, params=params) + logger.error(response.text) + try: + logger.debug(response.json()) + return response.json() + except json.decoder.JSONDecodeError: + logger.error(response.text) + return response.text + def reserve_fixed_address(self, network, mac_address, network_view: Optional[str] = None): """Reserve the next available ip address for a given network range. 
@@ -1343,7 +1423,7 @@ def reserve_fixed_address(self, network, mac_address, network_view: Optional[str return response.text return False - def create_fixed_address( + def create_fixed_address( # pylint: disable=too-many-arguments self, ip_address, name: str = None, @@ -1407,10 +1487,10 @@ def update_fixed_address(self, ref, data): try: response = self._request("PUT", path=ref, params=params, json=data) except HTTPError as err: - logger.error("Could not update Host address: %s for ref %s", err.response.text, ref) + logger.error("Could not update fixed address: %s for ref %s", err.response.text, ref) return None try: - logger.debug("Infoblox host record updated: %s", response.json()) + logger.debug("Infoblox fixed address record updated: %s", response.json()) results = response.json() return results except json.decoder.JSONDecodeError: @@ -2223,7 +2303,7 @@ def get_dns_view_for_network_view(self, network_view: str): return dns_view - @lru_cache(maxsize=None) + @lru_cache(maxsize=1024) def get_default_dns_view_for_network_view(self, network_view: str): """Get default (first on the list) DNS view for given network view. From 20c3a379734a1e65ba7c72916525334342036695 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 10 Jun 2024 13:20:29 +0100 Subject: [PATCH 111/229] Add cfs. Add namespace to tagged objects. --- .../integrations/infoblox/utils/diffsync.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/utils/diffsync.py b/nautobot_ssot/integrations/infoblox/utils/diffsync.py index fb3504cd2..5847e1a22 100644 --- a/nautobot_ssot/integrations/infoblox/utils/diffsync.py +++ b/nautobot_ssot/integrations/infoblox/utils/diffsync.py @@ -1,10 +1,12 @@ """Utilities for DiffSync related stuff.""" from typing import Optional + from django.contrib.contenttypes.models import ContentType from django.utils.text import slugify -from nautobot.ipam.models import IPAddress, Prefix, VLAN from nautobot.extras.models import CustomField, Tag +from nautobot.ipam.models import VLAN, IPAddress, Namespace, Prefix + from nautobot_ssot.integrations.infoblox.constant import TAG_COLOR @@ -18,7 +20,7 @@ def create_tag_sync_from_infoblox(): "color": TAG_COLOR, }, ) - for model in [IPAddress, Prefix, VLAN]: + for model in [IPAddress, Namespace, Prefix, VLAN]: tag.content_types.add(ContentType.objects.get_for_model(model)) return tag @@ -87,14 +89,16 @@ def build_vlan_map(vlans: list): return vlan_map -def get_valid_custom_fields(cfs: dict, excluded_cfs: list): +def get_valid_custom_fields(cfs: dict, excluded_cfs: Optional[list] = None): """Remove custom fields that are on the excluded list. 
Args: cfs: custom fields excluded_cfs: list of excluded custom fields """ - default_excluded_cfs = ["ssot_synced_to_infoblox", "dhcp_ranges", "mac_address"] + if excluded_cfs is None: + excluded_cfs = [] + default_excluded_cfs = ["dhcp_ranges", "fixed_address_comment", "mac_address", "ssot_synced_to_infoblox"] excluded_cfs.extend(default_excluded_cfs) valid_cfs = {} for cf_name, val in cfs.items(): @@ -118,7 +122,7 @@ def get_default_custom_fields(cf_contenttype: ContentType, excluded_cfs: Optiona excluded_cfs = [] customfields = CustomField.objects.filter(content_types=cf_contenttype) # These cfs are always excluded - default_excluded_cfs = ["ssot_synced_to_infoblox", "dhcp_ranges"] + default_excluded_cfs = ["dhcp_ranges", "fixed_address_comment", "mac_address", "ssot_synced_to_infoblox"] # User defined excluded cfs excluded_cfs.extend(default_excluded_cfs) default_cfs = {} From a56affa2eb019a88e5c8acf90ca4e3450dd3bc80 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 10 Jun 2024 13:46:13 +0100 Subject: [PATCH 112/229] Update templates. --- .../ssotinfobloxconfig_retrieve.html | 16 ---------------- .../ssotinfobloxconfig_update.html | 4 ---- 2 files changed, 20 deletions(-) diff --git a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html index b3d425583..cc13df31c 100644 --- a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html +++ b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html @@ -76,22 +76,6 @@

     Infoblox - DNS record type
     {{ object.dns_record_type }}
-
-    Infoblox - Create IP Address as IP Reservation
-    {{ object.create_ip_reservation }}
-
-    Infoblox - Create Host Record for IP Address
-    {{ object.create_host_record }}
-
-    Infoblox - Create A Record for IP Address
-    {{ object.create_a_record }}
-
-    Infoblox - Create PTR Record for IP Address
-    {{ object.create_ptr_record }}
-
     Can be used in Sync Job
     {{ object.job_enabled }}
diff --git a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_update.html b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_update.html
index f77dbe608..718e89b1d 100644
--- a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_update.html
+++ b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_update.html
@@ -18,10 +18,6 @@
 {% render_field form.import_ipv6 %}
 {% render_field form.fixed_address_type %}
 {% render_field form.dns_record_type %}
- {% render_field form.create_ip_reservation %}
- {% render_field form.create_host_record %}
- {% render_field form.create_a_record %}
- {% render_field form.create_ptr_record %}
 {% render_field form.default_status %}
 {% render_field form.job_enabled %}

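NOTE: The next two patches add JSON fixtures and the client tests that consume them through get_fixed_address_by_ref() and get_host_by_ref(), imported from fixtures_infoblox. That module is not shown in this series; a minimal sketch of what such loaders typically look like, with the _json_read_fixture helper name being an assumption:

    import json
    import os

    FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures")

    def _json_read_fixture(name):
        """Return the parsed payload of a JSON fixture file."""
        with open(os.path.join(FIXTURES_DIR, name), encoding="utf-8") as fixture:
            return json.load(fixture)

    def get_fixed_address_by_ref():
        """Return mock response payload for get_fixed_address_by_ref tests."""
        return _json_read_fixture("get_fixed_address_by_ref.json")

    def get_host_by_ref():
        """Return mock response payload for get_host_record_by_ref tests."""
        return _json_read_fixture("get_host_by_ref.json")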
    From 575c6783c4aa4c1718a59d4644cf3a587f0201e8 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 10 Jun 2024 13:46:36 +0100 Subject: [PATCH 113/229] Add test fixtures. --- .../fixtures/get_fixed_address_by_ref.json | 10 ++++++++++ .../tests/infoblox/fixtures/get_host_by_ref.json | 14 ++++++++++++++ 2 files changed, 24 insertions(+) create mode 100644 nautobot_ssot/tests/infoblox/fixtures/get_fixed_address_by_ref.json create mode 100644 nautobot_ssot/tests/infoblox/fixtures/get_host_by_ref.json diff --git a/nautobot_ssot/tests/infoblox/fixtures/get_fixed_address_by_ref.json b/nautobot_ssot/tests/infoblox/fixtures/get_fixed_address_by_ref.json new file mode 100644 index 000000000..078e774d5 --- /dev/null +++ b/nautobot_ssot/tests/infoblox/fixtures/get_fixed_address_by_ref.json @@ -0,0 +1,10 @@ +{ + "_ref": "fixedaddress/ZG5zLmZpeGVkX2FkZHJlc3MkMTAuMC4wLjIuMi4u:10.0.0.2/dev", + "extattrs": { + + }, + "mac": "52:1f:83:d4:9a:2e", + "name": "host-fixed1", + "network": "10.0.0.0/24", + "network_view": "dev" +} \ No newline at end of file diff --git a/nautobot_ssot/tests/infoblox/fixtures/get_host_by_ref.json b/nautobot_ssot/tests/infoblox/fixtures/get_host_by_ref.json new file mode 100644 index 000000000..52b63c16d --- /dev/null +++ b/nautobot_ssot/tests/infoblox/fixtures/get_host_by_ref.json @@ -0,0 +1,14 @@ +{ + "_ref": "record:host/ZG5zLmhvc3QkLl9kZWZhdWx0LnRlc3QudGVzdGRldmljZTE:testdevice1.test/default", + "ipv4addrs": [ + { + "_ref": "record:host_ipv4addr/ZG5zLmhvc3RfYWRkcmVzcyQuX2RlZmF1bHQudGVzdC50ZXN0ZGV2aWNlMS4xMC4yMjAuMC4xMDEu:10.220.0.101/testdevice1.test/default", + "configure_for_dhcp": true, + "host": "testdevice1.test", + "ipv4addr": "10.220.0.101", + "mac": "11:11:11:11:11:11" + } + ], + "name": "testdevice1.test", + "view": "default" +} From 0491a5430d9c9ef69dfdcd44a06a65f1b2e09af2 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 10 Jun 2024 13:47:17 +0100 Subject: [PATCH 114/229] Add client tests. 
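NOTE: The failure-path tests below lean on the error-handling change from patch 110: _request() now appends the Infoblox response body to the HTTPError message but passes the original response object through, so callers can still branch on the status code. A minimal sketch of that contract from the caller's side; the helper and ref value here are illustrative, not part of the patch:

    import requests

    def safe_get_fixed_address(client, ref):
        """Fetch a fixed address by ref, tolerating 404s (sketch only)."""
        try:
            return client.get_fixed_address_by_ref(ref)
        except requests.exceptions.HTTPError as err:
            # The enriched message carries the Infoblox error text, while
            # err.response still exposes the HTTP status for programmatic checks.
            if err.response is not None and err.response.status_code == 404:
                return None  # treat as "object not found" rather than a hard failure
            raise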
--- nautobot_ssot/tests/infoblox/test_client.py | 88 ++++++++++++++++----- 1 file changed, 68 insertions(+), 20 deletions(-) diff --git a/nautobot_ssot/tests/infoblox/test_client.py b/nautobot_ssot/tests/infoblox/test_client.py index 424a325d7..aa04fb03a 100644 --- a/nautobot_ssot/tests/infoblox/test_client.py +++ b/nautobot_ssot/tests/infoblox/test_client.py @@ -2,48 +2,50 @@ # pylint: disable=protected-access # pylint: disable=too-many-public-methods +import unittest from collections import namedtuple from os import path - -import unittest from unittest.mock import patch -from requests.models import HTTPError + import requests_mock +from requests.models import HTTPError from nautobot_ssot.integrations.infoblox.utils.client import InvalidUrlScheme, get_dns_name from .fixtures_infoblox import ( - get_ptr_record_by_ip, - get_ptr_record_by_name, - get_ptr_record_by_ref, - localhost_client_infoblox, - get_all_ipv4address_networks, - get_all_ipv4address_networks_medium, - get_all_ipv4address_networks_large, - get_all_ipv4address_networks_bulk, - create_ptr_record, + LOCALHOST, create_a_record, create_host_record, - get_host_by_ip, + create_ptr_record, + find_network_reference, + find_next_available_ip, get_a_record_by_ip, get_a_record_by_name, get_a_record_by_ref, - get_host_record_by_name, get_all_dns_views, - get_dhcp_lease_from_ipv4, - get_dhcp_lease_from_hostname, + get_all_ipv4address_networks, + get_all_ipv4address_networks_bulk, + get_all_ipv4address_networks_large, + get_all_ipv4address_networks_medium, + get_all_network_views, get_all_ranges, get_all_subnets, get_authoritative_zone, get_authoritative_zones_for_dns_view, + get_dhcp_lease_from_hostname, + get_dhcp_lease_from_ipv4, + get_fixed_address_by_ref, + get_host_by_ip, + get_host_by_ref, + get_host_record_by_name, get_network_containers, get_network_containers_ipv6, - get_all_network_views, get_network_view, - find_network_reference, - find_next_available_ip, + get_ptr_record_by_ip, + get_ptr_record_by_name, + get_ptr_record_by_ref, + localhost_client_infoblox, search_ipv4_address, - LOCALHOST, ) Origin = namedtuple("Origin", ["name", "slug"]) @@ -196,6 +198,29 @@ def test_get_all_ipv4_address_networks_bulk_data_success(self): resp = self.infoblox_client.get_all_ipv4address_networks(prefixes=prefixes) self.assertEqual(resp, get_all_ipv4address_networks_bulk()[0]) + def test_get_fixed_address_by_ref_success(self): + """Test get_fixed_address_by_ref success.""" + mock_ref = "fixedaddress/ZG5zLmZpeGVkX2FkZHJlc3MkMTAuMC4wLjIuMi4u:10.0.0.2/dev" + mock_response = get_fixed_address_by_ref() + + with requests_mock.Mocker() as req: + req.get(f"{LOCALHOST}/{mock_ref}", json=mock_response, status_code=200) + resp = self.infoblox_client.get_fixed_address_by_ref(mock_ref) + + self.assertEqual(resp, mock_response) + + def test_get_fixed_address_by_ref_fail(self): + """Test get_fixed_address_by_ref fail.""" + mock_ref = "fixedaddress/ZG5zLmZpeGVkX2FkZHJlc3MkMTAuMC4wLjIuMi4u:10.0.0.2/dev" + mock_response = "" + + with requests_mock.Mocker() as req: + req.get(f"{LOCALHOST}/{mock_ref}", json=mock_response, status_code=404) + with self.assertRaises(HTTPError) as context: + self.infoblox_client.get_fixed_address_by_ref(mock_ref) + + self.assertEqual(context.exception.response.status_code, 404) + def test_get_host_record_by_name_success(self): """Test get_host_by_record success.""" mock_fqdn = "test.fqdn.com" @@ -246,6 +271,29 @@ def test_get_host_record_by_ip_fail(self): self.assertEqual(context.exception.response.status_code, 404) + def 
test_get_host_record_by_ref_success(self): + """Test get_host_record_by_ref success.""" + mock_ref = "record:host/ZG5zLmhvc3QkLl9kZWZhdWx0LnRlc3QudGVzdGRldmljZTE:testdevice1.test/default" + mock_response = get_host_by_ref() + + with requests_mock.Mocker() as req: + req.get(f"{LOCALHOST}/{mock_ref}", json=mock_response, status_code=200) + resp = self.infoblox_client.get_host_record_by_ref(mock_ref) + + self.assertEqual(resp, mock_response) + + def test_get_host_record_by_ref_fail(self): + """Test get_host_record_by_ref fail.""" + mock_ref = "record:host/ZG5zLmhvc3QkLl9kZWZhdWx0LnRlc3QudGVzdGRldmljZTE:testdevice1.test/default" + mock_response = "" + + with requests_mock.Mocker() as req: + req.get(f"{LOCALHOST}/{mock_ref}", json=mock_response, status_code=404) + with self.assertRaises(HTTPError) as context: + self.infoblox_client.get_host_record_by_ref(mock_ref) + + self.assertEqual(context.exception.response.status_code, 404) + def test_get_a_record_by_name_success(self): """Test get_a_record_by_name success.""" mock_fqdn = "test.fqdn.com" From 8a75a36dfafcbcba10359293a2edcdc7603d1988 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 10 Jun 2024 13:48:23 +0100 Subject: [PATCH 115/229] Linting. --- nautobot_ssot/tests/infoblox/test_infoblox_adapter.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py b/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py index f411d0a6e..073bde971 100644 --- a/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py +++ b/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py @@ -2,9 +2,7 @@ import unittest -from nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox import ( - InfobloxAdapter, -) +from nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox import InfobloxAdapter from .fixtures_infoblox import create_default_infoblox_config From 11633f38514c300aa5bb3a129aead2c9ef22e14f Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 10 Jun 2024 13:48:56 +0100 Subject: [PATCH 116/229] Update migrations. 
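NOTE: The migration below is regenerated in place, swapping the three create_* booleans for the two choice fields; this is only safe because 0009 has not shipped in a released version. Had it shipped, an equivalent follow-up migration would be required instead. A hedged sketch of what that would look like:

    from django.db import migrations, models

    class Migration(migrations.Migration):
        dependencies = [("nautobot_ssot", "0009_ssotconfig_ssotinfobloxconfig")]

        operations = [
            migrations.RemoveField(model_name="ssotinfobloxconfig", name="create_host_record"),
            migrations.RemoveField(model_name="ssotinfobloxconfig", name="create_a_record"),
            migrations.RemoveField(model_name="ssotinfobloxconfig", name="create_ptr_record"),
            migrations.AddField(
                model_name="ssotinfobloxconfig",
                name="dns_record_type",
                field=models.CharField(default="create-host-record", max_length=255),
            ),
            migrations.AddField(
                model_name="ssotinfobloxconfig",
                name="fixed_address_type",
                field=models.CharField(default="do-not-create-record", max_length=255),
            ),
        ]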
--- .../migrations/0009_ssotconfig_ssotinfobloxconfig.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/nautobot_ssot/migrations/0009_ssotconfig_ssotinfobloxconfig.py b/nautobot_ssot/migrations/0009_ssotconfig_ssotinfobloxconfig.py index 52e2784a5..a5dafbca6 100644 --- a/nautobot_ssot/migrations/0009_ssotconfig_ssotinfobloxconfig.py +++ b/nautobot_ssot/migrations/0009_ssotconfig_ssotinfobloxconfig.py @@ -1,4 +1,4 @@ -# Generated by Django 3.2.23 on 2024-05-21 17:21 +# Generated by Django 3.2.23 on 2024-06-04 16:04 import django.core.serializers.json from django.db import migrations, models @@ -71,9 +71,8 @@ class Migration(migrations.Migration): ), ("import_ipv4", models.BooleanField(default=True)), ("import_ipv6", models.BooleanField(default=False)), - ("create_host_record", models.BooleanField(default=True)), - ("create_a_record", models.BooleanField(default=False)), - ("create_ptr_record", models.BooleanField(default=False)), + ("dns_record_type", models.CharField(default="create-host-record", max_length=255)), + ("fixed_address_type", models.CharField(default="do-not-create-record", max_length=255)), ("job_enabled", models.BooleanField(default=False)), ("default_status", models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to="extras.status")), ( From dc26832285c7c625dd2c536143e4f0f8a83d8dbf Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 10 Jun 2024 13:49:29 +0100 Subject: [PATCH 117/229] Update attr in test object. --- nautobot_ssot/tests/infoblox/test_nautobot_models.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nautobot_ssot/tests/infoblox/test_nautobot_models.py b/nautobot_ssot/tests/infoblox/test_nautobot_models.py index 9488319ad..6781591df 100644 --- a/nautobot_ssot/tests/infoblox/test_nautobot_models.py +++ b/nautobot_ssot/tests/infoblox/test_nautobot_models.py @@ -38,6 +38,7 @@ def _get_network_dict(attrs): "namespace": "dev", "status": "Active", "ext_attrs": {}, + "vlans": {}, } network_dict.update(attrs) @@ -305,7 +306,7 @@ def test_ip_address_create_address_from_fixed_address_mac_and_dns_record(self): def test_ip_address_update_address_from_fixed_address_reserved(self): """Validate ip address gets updated from Infoblox fixed address reservation.""" - inf_network_atrs = {"network_type": "network", "namespace": "dev", "ext_attrs": {"vlans": {}}} + inf_network_atrs = {"network_type": "network", "namespace": "dev"} inf_ds_network = self.infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs)) self.infoblox_adapter.add(inf_ds_network) inf_address_atrs = { From de1bf78324612a1f1be302eed784525a99ebd3f5 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 10 Jun 2024 14:01:53 +0100 Subject: [PATCH 118/229] Linting. --- .../integrations/infoblox/diffsync/models/infoblox.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py index adda8046f..e3e9140e3 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py @@ -72,7 +72,7 @@ class InfobloxIPAddress(IPAddress): """Infoblox implementation of the VLAN Model.""" @classmethod - def create(cls, diffsync, ids, attrs): + def create(cls, diffsync, ids, attrs): # pylint: disable=too-many-branches """Creates IP Address Reservation. Additionally create DNS Host record or an A record. 
Optionally creates a PTR record in addition to an A record. @@ -330,7 +330,7 @@ def _ip_update_create_fixed_address(self, new_attrs: dict, ip_address: str, netw extra={"grouping": "update"}, ) - def _ip_update_create_or_update_dns_records( # pylint: disable=too-many-arguments + def _ip_update_create_or_update_dns_records( # pylint: disable=too-many-arguments,too-many-branches self, new_attrs: dict, inf_attrs: dict, canonical_dns_name: str, ip_address: str, network_view: str ) -> None: """Creates or update DNS records connected to the IP address. Triggered by IP Address update. From d12620979bfa862757a56aede19ac62fbc4ae67f Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 10 Jun 2024 14:02:34 +0100 Subject: [PATCH 119/229] Test if namespace is correctly tagged. --- .../tests/infoblox/test_tags_and_cfs.py | 30 +++++++++++++------ 1 file changed, 21 insertions(+), 9 deletions(-) diff --git a/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py b/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py index c935399b6..9bf2d1193 100644 --- a/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py +++ b/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py @@ -7,7 +7,7 @@ from django.test import TestCase from nautobot.extras.choices import CustomFieldTypeChoices from nautobot.extras.models import CustomField, Status, Tag -from nautobot.ipam.models import VLAN, IPAddress, Prefix, VLANGroup +from nautobot.ipam.models import VLAN, IPAddress, Namespace, Prefix, VLANGroup from nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox import InfobloxAdapter from nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot import NautobotAdapter @@ -29,7 +29,7 @@ def setUp(self): "color": "40bfae", }, ) - for model in [IPAddress, Prefix, VLAN]: + for model in [IPAddress, Namespace, Prefix, VLAN]: self.tag_sync_from_infoblox.content_types.add(ContentType.objects.get_for_model(model)) self.tag_sync_to_infoblox, _ = Tag.objects.get_or_create( name="SSoT Synced to Infoblox", @@ -47,22 +47,32 @@ def test_tags_have_correct_content_types_set(self): """Ensure tags have correct content types configured.""" for model in (IPAddress, Prefix, VLAN): content_type = ContentType.objects.get_for_model(model) - self.assertIn(content_type, self.tag_sync_from_infoblox.content_types.all()) self.assertIn(content_type, self.tag_sync_to_infoblox.content_types.all()) + for model in (IPAddress, Namespace, Prefix, VLAN): + content_type = ContentType.objects.get_for_model(model) + self.assertIn(content_type, self.tag_sync_from_infoblox.content_types.all()) + def test_objects_synced_from_infoblox_are_tagged(self): """Ensure objects synced from Infoblox have 'SSoT Synced from Infoblox' tag applied.""" - nb_diffsync = NautobotAdapter(config=self.config) - nb_diffsync.job = Mock() - nb_diffsync.load() + nautobot_adapter = NautobotAdapter(config=self.config) + nautobot_adapter.job = Mock() + nautobot_adapter.load() + + Namespace.objects.get_or_create(name="Global") infoblox_adapter = InfobloxAdapter(conn=Mock(), config=self.config) - ds_namespace = infoblox_adapter.namespace( + ds_namespace_global = infoblox_adapter.namespace( name="Global", ext_attrs={}, ) - infoblox_adapter.add(ds_namespace) + infoblox_adapter.add(ds_namespace_global) + ds_namespace_dev = infoblox_adapter.namespace( + name="dev", + ext_attrs={}, + ) + infoblox_adapter.add(ds_namespace_dev) ds_prefix = infoblox_adapter.prefix( network="10.0.0.0/8", description="Test Network", @@ -96,8 +106,10 @@ def test_objects_synced_from_infoblox_are_tagged(self): ext_attrs={}, ) 
infoblox_adapter.add(ds_vlan) + infoblox_adapter.sync_to(nautobot_adapter) - nb_diffsync.sync_from(infoblox_adapter) + namespace = Namespace.objects.get(name="dev") + self.assertEqual(namespace.tags.all()[0], self.tag_sync_from_infoblox) prefix = Prefix.objects.get(network="10.0.0.0", prefix_length="8") self.assertEqual(prefix.tags.all()[0], self.tag_sync_from_infoblox) From c7eb2875c0667540cc58ec95d65deb6f32c1d7cc Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Mon, 29 Apr 2024 10:25:26 -0500 Subject: [PATCH 120/229] =?UTF-8?q?feat:=20=E2=9C=A8=20Add=20Interface=20D?= =?UTF-8?q?iffSync=20model=20as=20child=20of=20Device.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/jobs/examples.py | 73 +++++++++++++++++++++++++++++++++- 1 file changed, 71 insertions(+), 2 deletions(-) diff --git a/nautobot_ssot/jobs/examples.py b/nautobot_ssot/jobs/examples.py index 3362e5900..08ec5f9a3 100644 --- a/nautobot_ssot/jobs/examples.py +++ b/nautobot_ssot/jobs/examples.py @@ -9,7 +9,7 @@ from django.templatetags.static import static from django.urls import reverse -from nautobot.dcim.models import Device, DeviceType, Location, LocationType, Manufacturer, Platform +from nautobot.dcim.models import Device, DeviceType, Interface, Location, LocationType, Manufacturer, Platform from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.jobs import ObjectVar, StringVar from nautobot.extras.models import ExternalIntegration, Role @@ -204,7 +204,7 @@ class DeviceModel(NautobotModel): "tenant__name", "asset_tag", ) - # _children = {"interface": "interfaces"} + _children = {"interface": "interfaces"} name: str location__name: str @@ -219,6 +219,41 @@ class DeviceModel(NautobotModel): status__name: str tenant__name: Optional[str] asset_tag: Optional[str] + interface: List["Interface"] = [] + + +class InterfaceModel(NautobotModel): + """Shared data model representing an Interface in either of the local or remote Nautobot instances.""" + + # Metadata about this model + _model = Interface + _modelname = "interface" + _identifiers = ("name", "device__name") + _attributes = ( + "device__location__name", + "device__location__parent__name", + "description", + "enabled", + "mac_address", + "mgmt_only", + "mtu", + "type", + "status__name", + ) + _children = {} + + # Data type declarations for all identifiers and attributes + device__name: str + device__location__name: str + device__location__parent__name: str + description: Optional[str] + enabled: bool + mac_address: Optional[str] + mgmt_only: bool + mtu: Optional[int] + name: str + type: str + status__name: str class LocationRemoteModel(LocationModel): @@ -358,6 +393,7 @@ class NautobotRemote(DiffSync): platform = PlatformModel role = RoleModel device = DeviceModel + interface = InterfaceModel # Top-level class labels, i.e. 
those classes that are handled directly rather than as children of other models
     top_level = ["tenant", "locationtype", "location", "manufacturer", "platform", "role", "device"]
@@ -403,6 +439,7 @@ def load(self):
         self.load_device_types()
         self.load_platforms()
         self.load_devices()
+        self.load_interfaces()
 
     def load_location_types(self):
         """Load LocationType data from the remote Nautobot instance."""
@@ -537,6 +574,37 @@ def load_devices(self):
             )
             self.add(device)
 
+    def load_interfaces(self):
+        """Load Interfaces data from the remote Nautobot instance."""
+        for interface in self._get_api_data("api/dcim/interfaces/?depth=3"):
+            try:
+                dev = self.get(
+                    self.device,
+                    {
+                        "name": interface["device"]["name"],
+                        "location__name": interface["device"]["location"]["name"],
+                        "location__parent__name": interface["device"]["location"]["parent"]["name"],
+                    },
+                )
+                new_interface = self.interface(
+                    name=interface["name"],
+                    device__name=interface["device"]["name"],
+                    device__location__name=interface["device"]["location"]["name"],
+                    device__location__parent__name=interface["device"]["location"]["parent"]["name"],
+                    description=interface["description"],
+                    enabled=interface["enabled"],
+                    mac_address=interface["mac_address"],
+                    mgmt_only=interface["mgmt_only"],
+                    mtu=interface["mtu"],
+                    type=interface["type"],
+                    status__name=interface["status"]["name"],
+                    pk=interface["id"],
+                )
+                self.add(new_interface)
+                dev.add_child(new_interface)
+            except ObjectNotFound:
+                self.job.logger.warning(f"Unable to find Device {interface['device']['name']}; Interface {interface['name']} not loaded.")
+
     def get_content_types(self, entry):
         """Create list of dicts of ContentTypes.
 
@@ -588,6 +656,7 @@ class NautobotLocal(NautobotAdapter):
     platform = PlatformModel
     role = RoleModel
     device = DeviceModel
+    interface = InterfaceModel
 
     # Top-level class labels, i.e.
those classes that are handled directly rather than as children of other models top_level = ["tenant", "locationtype", "location", "manufacturer", "platform", "role", "device"] From 4739768cef447eb47dbaa42de720268277654b0f Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Mon, 29 Apr 2024 10:27:38 -0500 Subject: [PATCH 121/229] =?UTF-8?q?feat:=20=E2=9C=A8=20Add=20IPAddress=20D?= =?UTF-8?q?iffSync=20model=20and=20associated=20load=20method.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/jobs/examples.py | 70 ++++++++++++++++++++++++++++++++-- 1 file changed, 67 insertions(+), 3 deletions(-) diff --git a/nautobot_ssot/jobs/examples.py b/nautobot_ssot/jobs/examples.py index 08ec5f9a3..a458ee735 100644 --- a/nautobot_ssot/jobs/examples.py +++ b/nautobot_ssot/jobs/examples.py @@ -13,7 +13,7 @@ from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.jobs import ObjectVar, StringVar from nautobot.extras.models import ExternalIntegration, Role -from nautobot.ipam.models import Prefix +from nautobot.ipam.models import IPAddress, Prefix from nautobot.tenancy.models import Tenant from diffsync import DiffSync @@ -120,6 +120,32 @@ class PrefixModel(NautobotModel): pk: Optional[UUID] +class IPAddressModel(NautobotModel): + """Shared data model representing an IPAddress in either of the local or remote Nautobot instances.""" + + # Metadata about this model + _model = IPAddress + _modelname = "ipaddress" + _identifiers = ("host",) + _attributes = ( + "mask_length", + "parent__network", + "status__name", + "ip_version", + "tenant__name", + ) + + # Data type declarations for all identifiers and attributes + host: str + mask_length: int + parent__network: str + status__name: str + ip_version: int + tenant__name: Optional[str] + + pk: Optional[UUID] + + class TenantModel(NautobotModel): """Shared data model representing a Tenant in either of the local or remote Nautobot instances.""" @@ -388,6 +414,7 @@ class NautobotRemote(DiffSync): location = LocationRemoteModel tenant = TenantRemoteModel prefix = PrefixRemoteModel + ipaddress = IPAddressModel manufacturer = ManufacturerModel device_type = DeviceTypeModel platform = PlatformModel @@ -396,7 +423,17 @@ class NautobotRemote(DiffSync): interface = InterfaceModel # Top-level class labels, i.e. those classes that are handled directly rather than as children of other models - top_level = ["tenant", "locationtype", "location", "manufacturer", "platform", "role", "device"] + top_level = [ + "tenant", + "locationtype", + "location", + "manufacturer", + "platform", + "role", + "device", + "prefix", + "ipaddress", + ] def __init__(self, *args, url=None, token=None, job=None, **kwargs): """Instantiate this class, but do not load data immediately from the remote system. 
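A note on the pattern the models in this patch follow: DiffSync keys objects by _identifiers (a difference there yields a create or delete) and compares _attributes for in-place updates. A self-contained sketch of that mechanism, with illustrative names that are not taken from this patch:

    from typing import Optional

    from diffsync import DiffSync, DiffSyncModel


    class AddressModel(DiffSyncModel):
        """Toy model: the identifier is the object's key; attributes are updatable."""

        _modelname = "address"
        _identifiers = ("host",)
        _attributes = ("status__name",)

        host: str
        status__name: Optional[str] = None


    class Adapter(DiffSync):
        """Toy adapter holding only AddressModel objects."""

        address = AddressModel
        top_level = ["address"]


    source, target = Adapter(), Adapter()
    source.add(AddressModel(host="10.0.0.1", status__name="Active"))
    target.add(AddressModel(host="10.0.0.1", status__name="Reserved"))
    # Same identifier, different attribute -> the diff is a single update.
    print(source.diff_to(target).summary())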
@@ -435,6 +472,7 @@ def load(self): self.load_roles() self.load_tenants() self.load_prefixes() + self.load_ipaddresses() self.load_manufacturers() self.load_device_types() self.load_platforms() @@ -508,6 +546,21 @@ def load_prefixes(self): self.add(prefix) self.job.logger.debug(f"Loaded {prefix} from remote Nautobot instance") + def load_ipaddresses(self): + """Load IPAddresses data from the remote Nautobot instance.""" + for ipaddr_entry in self._get_api_data("api/ipam/ipaddresses/?depth=2"): + ipaddr = self.ipaddress( + host=ipaddr_entry["host"], + mask_length=ipaddr_entry["mask_length"], + parent__network=ipaddr_entry["parent"]["network"], + status__name=ipaddr_entry["status"]["name"], + ip_version=ipaddr_entry["ip_version"], + tenant__name=ipaddr_entry["tenant"]["name"], + pk=ipaddr_entry["id"], + ) + self.add(ipaddr) + self.job.logger.debug(f"Loaded {ipaddr} from remote Nautobot instance") + def load_manufacturers(self): """Load Manufacturers data from the remote Nautobot instance.""" for manufacturer in self._get_api_data("api/dcim/manufacturers/?depth=1"): @@ -651,6 +704,7 @@ class NautobotLocal(NautobotAdapter): location = LocationModel tenant = TenantModel prefix = PrefixModel + ipaddress = IPAddressModel manufacturer = ManufacturerModel device_type = DeviceTypeModel platform = PlatformModel @@ -659,7 +713,17 @@ class NautobotLocal(NautobotAdapter): interface = InterfaceModel # Top-level class labels, i.e. those classes that are handled directly rather than as children of other models - top_level = ["tenant", "locationtype", "location", "manufacturer", "platform", "role", "device"] + top_level = [ + "tenant", + "locationtype", + "location", + "manufacturer", + "platform", + "role", + "device", + "prefix", + "ipaddress", + ] # The actual Data Source and Data Target Jobs are relatively simple to implement From a5582ed7e7102cbc208c88c2d0f69626a5589334 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Mon, 29 Apr 2024 10:28:11 -0500 Subject: [PATCH 122/229] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Add/correct=20the?= =?UTF-8?q?=20DataMappings=20for=20both=20example=20Jobs=20to=20include=20?= =?UTF-8?q?added=20models.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/jobs/examples.py | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/nautobot_ssot/jobs/examples.py b/nautobot_ssot/jobs/examples.py index a458ee735..1c3a5969f 100644 --- a/nautobot_ssot/jobs/examples.py +++ b/nautobot_ssot/jobs/examples.py @@ -763,10 +763,17 @@ class Meta: def data_mappings(cls): """This Job maps Region and Site objects from the remote system to the local system.""" return ( - DataMapping("Region (remote)", None, "Region (local)", reverse("dcim:location_list")), - DataMapping("Site (remote)", None, "Site (local)", reverse("dcim:location_list")), + DataMapping("LocationType (remote)", None, "LocationType (local)", reverse("dcim:locationtype_list")), + DataMapping("Location (remote)", None, "Location (local)", reverse("dcim:location_list")), + DataMapping("Role (remote)", None, "Role (local)", reverse("extras:role_list")), DataMapping("Prefix (remote)", None, "Prefix (local)", reverse("ipam:prefix_list")), + DataMapping("IPAddress (remote)", None, "IPAddress (local)", reverse("ipam:ipaddress_list")), DataMapping("Tenant (remote)", None, "Tenant (local)", reverse("tenancy:tenant_list")), + DataMapping("DeviceType (remote)", None, "DeviceType (local)", 
reverse("dcim:devicetype_list")), + DataMapping("Manufacturer (remote)", None, "Manufacturer (local)", reverse("dcim:manufacturer_list")), + DataMapping("Platform (remote)", None, "Platform (local)", reverse("dcim:platform_list")), + DataMapping("Device (remote)", None, "Device (local)", reverse("dcim:device_list")), + DataMapping("Interface (remote)", None, "Interface (local)", reverse("dcim:interface_list")), ) def run( # pylint: disable=too-many-arguments, arguments-differ @@ -868,10 +875,17 @@ class Meta: def data_mappings(cls): """This Job maps Region and Site objects from the local system to the remote system.""" return ( - DataMapping("Region (local)", reverse("dcim:location_list"), "Region (remote)", None), - DataMapping("Site (local)", reverse("dcim:location_list"), "Site (remote)", None), + DataMapping("LocationType (local)", reverse("dcim:locationtype_list"), "LocationType (remote)", None), + DataMapping("Location (local)", reverse("dcim:location_list"), "Location (remote)", None), + DataMapping("Role (local)", reverse("extras:role_list"), "Role (remote)", None), DataMapping("Prefix (local)", reverse("ipam:prefix_list"), "Prefix (remote)", None), + DataMapping("IPAddress (local)", reverse("ipam:ipaddress_list"), "IPAddress (remote)", None), DataMapping("Tenant (local)", reverse("tenancy:tenant_list"), "Tenant (remote)", None), + DataMapping("DeviceType (local)", reverse("dcim:devicetype_list"), "DeviceType (remote)", None), + DataMapping("Manufacturer (local)", reverse("dcim:manufacturer_list"), "Manufacturer (remote)", None), + DataMapping("Platform (local)", reverse("dcim:platform_list"), "Platform (remote)", None), + DataMapping("Device (local)", reverse("dcim:device_list"), "Device (remote)", None), + DataMapping("Interface (local)", reverse("dcim:interface_list"), "Interface (remote)", None), ) def load_source_adapter(self): From c82f3ca93f0928d4f4cd0859bcb5b15fd3bc7e81 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Mon, 29 Apr 2024 10:28:42 -0500 Subject: [PATCH 123/229] =?UTF-8?q?refactor:=20=F0=9F=94=A5=20Remove=20reg?= =?UTF-8?q?ion=20and=20site=20models=20from=20lookup=5Fobject=20methods=20?= =?UTF-8?q?as=20they're=20no=20longer=20used.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/jobs/examples.py | 32 ++------------------------------ 1 file changed, 2 insertions(+), 30 deletions(-) diff --git a/nautobot_ssot/jobs/examples.py b/nautobot_ssot/jobs/examples.py index 1c3a5969f..b27783625 100644 --- a/nautobot_ssot/jobs/examples.py +++ b/nautobot_ssot/jobs/examples.py @@ -821,21 +821,7 @@ def load_target_adapter(self): def lookup_object(self, model_name, unique_id): """Look up a Nautobot object based on the DiffSync model name and unique ID.""" - if model_name == "region": - try: - return Location.objects.get( - name=unique_id, location_type=LocationType.objects.get_or_create(name="Region")[0] - ) - except Location.DoesNotExist: - pass - elif model_name == "site": - try: - return Location.objects.get( - name=unique_id, location_type=LocationType.objects.get_or_create(name="Site")[0] - ) - except Location.DoesNotExist: - pass - elif model_name == "prefix": + if model_name == "prefix": try: return Prefix.objects.get( prefix=unique_id.split("__")[0], tenant__name=unique_id.split("__")[1] or None @@ -900,21 +886,7 @@ def load_target_adapter(self): def lookup_object(self, model_name, unique_id): """Look up a Nautobot object based on the DiffSync model name and unique 
ID.""" - if model_name == "region": - try: - return Location.objects.get( - name=unique_id, location_type=LocationType.objects.get_or_create(name="Region")[0] - ) - except Location.DoesNotExist: - pass - elif model_name == "site": - try: - return Location.objects.get( - name=unique_id, location_type=LocationType.objects.get_or_create(name="Site") - ) - except Location.DoesNotExist: - pass - elif model_name == "prefix": + if model_name == "prefix": try: return Prefix.objects.get( prefix=unique_id.split("__")[0], tenant__name=unique_id.split("__")[1] or None From 6c4f9515b94e4a95581007fc1f6d9f9eedf13cfc Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Thu, 30 May 2024 10:13:43 -0500 Subject: [PATCH 124/229] =?UTF-8?q?feat:=20=E2=9C=A8=20Add=20Namespace=20N?= =?UTF-8?q?autobotModel=20and=20update=20Prefix=20and=20IPAddress=20models?= =?UTF-8?q?=20to=20use=20it.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/jobs/examples.py | 48 +++++++++++++++++++++++++++++----- 1 file changed, 42 insertions(+), 6 deletions(-) diff --git a/nautobot_ssot/jobs/examples.py b/nautobot_ssot/jobs/examples.py index b27783625..b9becc2b0 100644 --- a/nautobot_ssot/jobs/examples.py +++ b/nautobot_ssot/jobs/examples.py @@ -13,7 +13,7 @@ from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.jobs import ObjectVar, StringVar from nautobot.extras.models import ExternalIntegration, Role -from nautobot.ipam.models import IPAddress, Prefix +from nautobot.ipam.models import IPAddress, Namespace, Prefix from nautobot.tenancy.models import Tenant from diffsync import DiffSync @@ -99,6 +99,22 @@ class RoleModel(NautobotModel): pk: Optional[UUID] +class NamespaceModel(NautobotModel): + """Shared data model representing a Namespace in either of the local or remote Nautobot instance.""" + + # Metadata about this model + _model = Namespace + _modelname = "namespace" + _identifiers = ("name",) + _attributes = ("description",) + + name: str + description: Optional[str] = "" + + # Not in _attributes or _identifiers, hence not included in diff calculations + pk: Optional[UUID] + + class PrefixModel(NautobotModel): """Shared data model representing a Prefix in either of the local or remote Nautobot instances.""" @@ -107,10 +123,11 @@ class PrefixModel(NautobotModel): _modelname = "prefix" _identifiers = ("network", "prefix_length", "tenant__name") # To keep this example simple, we don't include **all** attributes of a Prefix here. But you could! 
- _attributes = ("description", "status__name") + _attributes = ("description", "namespace__name", "status__name") # Data type declarations for all identifiers and attributes network: str + namespace__name: str prefix_length: int tenant__name: Optional[str] status__name: str @@ -129,7 +146,7 @@ class IPAddressModel(NautobotModel): _identifiers = ("host",) _attributes = ( "mask_length", - "parent__network", + "parent__namespace__name", "status__name", "ip_version", "tenant__name", @@ -138,7 +155,7 @@ class IPAddressModel(NautobotModel): # Data type declarations for all identifiers and attributes host: str mask_length: int - parent__network: str + parent__namespace__name: str status__name: str ip_version: int tenant__name: Optional[str] @@ -373,6 +390,7 @@ def create(cls, diffsync, ids, attrs): "network": ids["network"], "prefix_length": ids["prefix_length"], "tenant": {"name": ids["tenant__name"]} if ids["tenant__name"] else None, + "namespace": {"name": attrs["namespace__name"]} if attrs["namespace__name"] else None, "description": attrs["description"], "status": attrs["status__name"], }, @@ -413,6 +431,7 @@ class NautobotRemote(DiffSync): locationtype = LocationTypeModel location = LocationRemoteModel tenant = TenantRemoteModel + namespace = NamespaceModel prefix = PrefixRemoteModel ipaddress = IPAddressModel manufacturer = ManufacturerModel @@ -431,6 +450,7 @@ class NautobotRemote(DiffSync): "platform", "role", "device", + "namespace", "prefix", "ipaddress", ] @@ -471,6 +491,7 @@ def load(self): self.load_locations() self.load_roles() self.load_tenants() + self.load_namespaces() self.load_prefixes() self.load_ipaddresses() self.load_manufacturers() @@ -532,12 +553,23 @@ def load_tenants(self): ) self.add(tenant) + def load_namespaces(self): + """Load Namespaces data from remote Nautobot instance.""" + for namespace_entry in self._get_api_data("api/ipam/namespaces/?depth=1"): + namespace = self.namespace( + name=namespace_entry["name"], + description=namespace_entry["description"], + pk=namespace_entry["id"], + ) + self.add(namespace) + def load_prefixes(self): """Load Prefixes data from the remote Nautobot instance.""" - for prefix_entry in self._get_api_data("api/ipam/prefixes/?depth=1"): + for prefix_entry in self._get_api_data("api/ipam/prefixes/?depth=2"): prefix = self.prefix( network=prefix_entry["network"], prefix_length=prefix_entry["prefix_length"], + namespace__name=prefix_entry["namespace"]["name"], description=prefix_entry["description"], status__name=prefix_entry["status"]["name"] if prefix_entry["status"].get("name") else "Active", tenant__name=prefix_entry["tenant"]["name"] if prefix_entry["tenant"] else "", @@ -552,7 +584,7 @@ def load_ipaddresses(self): ipaddr = self.ipaddress( host=ipaddr_entry["host"], mask_length=ipaddr_entry["mask_length"], - parent__network=ipaddr_entry["parent"]["network"], + parent__namespace__name=ipaddr_entry["parent"]["namespace"]["name"], status__name=ipaddr_entry["status"]["name"], ip_version=ipaddr_entry["ip_version"], tenant__name=ipaddr_entry["tenant"]["name"], @@ -703,6 +735,7 @@ class NautobotLocal(NautobotAdapter): locationtype = LocationTypeModel location = LocationModel tenant = TenantModel + namespace = NamespaceModel prefix = PrefixModel ipaddress = IPAddressModel manufacturer = ManufacturerModel @@ -721,6 +754,7 @@ class NautobotLocal(NautobotAdapter): "platform", "role", "device", + "namespace", "prefix", "ipaddress", ] @@ -766,6 +800,7 @@ def data_mappings(cls): DataMapping("LocationType (remote)", None, "LocationType (local)", 
reverse("dcim:locationtype_list")), DataMapping("Location (remote)", None, "Location (local)", reverse("dcim:location_list")), DataMapping("Role (remote)", None, "Role (local)", reverse("extras:role_list")), + DataMapping("Namespace (remote)", None, "Namespace (local)", reverse("ipam:namespace_list")), DataMapping("Prefix (remote)", None, "Prefix (local)", reverse("ipam:prefix_list")), DataMapping("IPAddress (remote)", None, "IPAddress (local)", reverse("ipam:ipaddress_list")), DataMapping("Tenant (remote)", None, "Tenant (local)", reverse("tenancy:tenant_list")), @@ -864,6 +899,7 @@ def data_mappings(cls): DataMapping("LocationType (local)", reverse("dcim:locationtype_list"), "LocationType (remote)", None), DataMapping("Location (local)", reverse("dcim:location_list"), "Location (remote)", None), DataMapping("Role (local)", reverse("extras:role_list"), "Role (remote)", None), + DataMapping("Namespace (local)", reverse("ipam:prefix_list"), "Namespace (remote)", None), DataMapping("Prefix (local)", reverse("ipam:prefix_list"), "Prefix (remote)", None), DataMapping("IPAddress (local)", reverse("ipam:ipaddress_list"), "IPAddress (remote)", None), DataMapping("Tenant (local)", reverse("tenancy:tenant_list"), "Tenant (remote)", None), From 08942f8e3bdcaa4de252600aeb68db5504d69676 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Thu, 30 May 2024 10:14:07 -0500 Subject: [PATCH 125/229] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Correct=20source?= =?UTF-8?q?=20Job=20arg=20to=20not=20be=20required.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/jobs/examples.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nautobot_ssot/jobs/examples.py b/nautobot_ssot/jobs/examples.py index b9becc2b0..300857017 100644 --- a/nautobot_ssot/jobs/examples.py +++ b/nautobot_ssot/jobs/examples.py @@ -772,6 +772,7 @@ class ExampleDataSource(DataSource): queryset=ExternalIntegration.objects.all(), display_field="display", label="Nautobot Demo Instance", + required=False, ) source_url = StringVar( description="Remote Nautobot instance to load Sites and Regions from", default="https://demo.nautobot.com" From 66299ade872eca7e704fc6c99153c31bd91bdc06 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Thu, 30 May 2024 10:14:26 -0500 Subject: [PATCH 126/229] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Correct=20API=20U?= =?UTF-8?q?RI=20for=20IPAddress=20query.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/jobs/examples.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/jobs/examples.py b/nautobot_ssot/jobs/examples.py index 300857017..9fc9a046c 100644 --- a/nautobot_ssot/jobs/examples.py +++ b/nautobot_ssot/jobs/examples.py @@ -580,7 +580,7 @@ def load_prefixes(self): def load_ipaddresses(self): """Load IPAddresses data from the remote Nautobot instance.""" - for ipaddr_entry in self._get_api_data("api/ipam/ipaddresses/?depth=2"): + for ipaddr_entry in self._get_api_data("api/ipam/ip-addresses/?depth=2"): ipaddr = self.ipaddress( host=ipaddr_entry["host"], mask_length=ipaddr_entry["mask_length"], From b7e7ed4cc14a2b5d01b0970e747a6d7e7d9bbbd2 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Thu, 30 May 2024 10:16:22 -0500 Subject: [PATCH 127/229] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Increase=20timeou?= =?UTF-8?q?ts=20for=20API=20queries=20to=205=20minutes.?= MIME-Version: 
1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/jobs/examples.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nautobot_ssot/jobs/examples.py b/nautobot_ssot/jobs/examples.py index 9fc9a046c..0f5d66a2c 100644 --- a/nautobot_ssot/jobs/examples.py +++ b/nautobot_ssot/jobs/examples.py @@ -478,10 +478,10 @@ def __init__(self, *args, url=None, token=None, job=None, **kwargs): def _get_api_data(self, url_path: str) -> Mapping: """Returns data from a url_path using pagination.""" - data = requests.get(f"{self.url}/{url_path}", headers=self.headers, params={"limit": 0}, timeout=60).json() + data = requests.get(f"{self.url}/{url_path}", headers=self.headers, params={"limit": 0}, timeout=600).json() result_data = data["results"] while data["next"]: - data = requests.get(data["next"], headers=self.headers, params={"limit": 0}, timeout=60).json() + data = requests.get(data["next"], headers=self.headers, params={"limit": 0}, timeout=600).json() result_data.extend(data["results"]) return result_data From c430dfdcef7294359bc5a4b72df0601c77f1c6ef Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Thu, 30 May 2024 10:16:41 -0500 Subject: [PATCH 128/229] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Correct=20childre?= =?UTF-8?q?n=20definition=20for=20Interfaces=20on=20Device=20NautobotModel?= =?UTF-8?q?.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/jobs/examples.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/jobs/examples.py b/nautobot_ssot/jobs/examples.py index 0f5d66a2c..5451c3ec1 100644 --- a/nautobot_ssot/jobs/examples.py +++ b/nautobot_ssot/jobs/examples.py @@ -262,7 +262,7 @@ class DeviceModel(NautobotModel): status__name: str tenant__name: Optional[str] asset_tag: Optional[str] - interface: List["Interface"] = [] + interfaces: List["InterfaceModel"] = [] class InterfaceModel(NautobotModel): From 3023ef3d91a1d59b92bbbb2fb88baa9fa4afce0e Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Thu, 30 May 2024 14:26:16 -0500 Subject: [PATCH 129/229] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Revert=20IPAddres?= =?UTF-8?q?s=20parent=20Namespace=20to=20parent=20network.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/jobs/examples.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/nautobot_ssot/jobs/examples.py b/nautobot_ssot/jobs/examples.py index 5451c3ec1..ec509ad3a 100644 --- a/nautobot_ssot/jobs/examples.py +++ b/nautobot_ssot/jobs/examples.py @@ -143,10 +143,9 @@ class IPAddressModel(NautobotModel): # Metadata about this model _model = IPAddress _modelname = "ipaddress" - _identifiers = ("host",) + _identifiers = ("host", "parent__network") _attributes = ( "mask_length", - "parent__namespace__name", "status__name", "ip_version", "tenant__name", @@ -155,7 +154,7 @@ class IPAddressModel(NautobotModel): # Data type declarations for all identifiers and attributes host: str mask_length: int - parent__namespace__name: str + parent__network: str status__name: str ip_version: int tenant__name: Optional[str] @@ -584,7 +583,7 @@ def load_ipaddresses(self): ipaddr = self.ipaddress( host=ipaddr_entry["host"], mask_length=ipaddr_entry["mask_length"], - parent__namespace__name=ipaddr_entry["parent"]["namespace"]["name"], + parent__network=ipaddr_entry["parent"]["network"], 
status__name=ipaddr_entry["status"]["name"], ip_version=ipaddr_entry["ip_version"], tenant__name=ipaddr_entry["tenant"]["name"], From a148f247e020724ef121ec49f25f8d08062039b6 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Thu, 30 May 2024 14:26:41 -0500 Subject: [PATCH 130/229] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Correct=20loading?= =?UTF-8?q?=20of=20Tenant=20to=20first=20check=20Tenant=20is=20assigned.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/jobs/examples.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/jobs/examples.py b/nautobot_ssot/jobs/examples.py index ec509ad3a..b40ad5566 100644 --- a/nautobot_ssot/jobs/examples.py +++ b/nautobot_ssot/jobs/examples.py @@ -586,7 +586,7 @@ def load_ipaddresses(self): parent__network=ipaddr_entry["parent"]["network"], status__name=ipaddr_entry["status"]["name"], ip_version=ipaddr_entry["ip_version"], - tenant__name=ipaddr_entry["tenant"]["name"], + tenant__name=ipaddr_entry["tenant"]["name"] if ipaddr_entry.get("tenant") else "", pk=ipaddr_entry["id"], ) self.add(ipaddr) From b49c271b6cdc279095ce1c4afdd76c9dcfba4465 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Thu, 30 May 2024 14:27:15 -0500 Subject: [PATCH 131/229] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Correct=20Interfa?= =?UTF-8?q?ce=20type=20to=20use=20value=20key.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/jobs/examples.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/jobs/examples.py b/nautobot_ssot/jobs/examples.py index b40ad5566..bc88aa4ea 100644 --- a/nautobot_ssot/jobs/examples.py +++ b/nautobot_ssot/jobs/examples.py @@ -680,7 +680,7 @@ def load_interfaces(self): mac_address=interface["mac_address"], mgmt_only=interface["mgmt_only"], mtu=interface["mtu"], - type=interface["type"], + type=interface["type"]["value"], status__name=interface["status"]["name"], pk=interface["id"], ) From 52c8fc8a42905e9f1e77929ff1d5c2da65f299c0 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Thu, 30 May 2024 14:28:01 -0500 Subject: [PATCH 132/229] =?UTF-8?q?refactor:=20=F0=9F=93=9D=20Refactor=20l?= =?UTF-8?q?ogging=20to=20be=20clearer=20about=20what's=20happening=20durin?= =?UTF-8?q?g=20loading.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/jobs/examples.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/nautobot_ssot/jobs/examples.py b/nautobot_ssot/jobs/examples.py index bc88aa4ea..975e28b7b 100644 --- a/nautobot_ssot/jobs/examples.py +++ b/nautobot_ssot/jobs/examples.py @@ -601,6 +601,7 @@ def load_manufacturers(self): pk=manufacturer["id"], ) self.add(manufacturer) + self.job.logger.debug(f"Loaded {manufacturer} from remote Nautobot instance") def load_device_types(self): """Load DeviceTypes data from the remote Nautobot instance.""" @@ -616,6 +617,7 @@ def load_device_types(self): pk=device_type["id"], ) self.add(devicetype) + self.job.logger.debug(f"Loaded {devicetype} from remote Nautobot instance") manufacturer.add_child(devicetype) except ObjectNotFound: self.job.logger.debug(f"Unable to find Manufacturer {device_type['manufacturer']['name']}") @@ -632,6 +634,7 @@ def load_platforms(self): pk=platform["id"], ) self.add(platform) + self.job.logger.debug(f"Loaded {platform} from remote 
Nautobot instance") def load_devices(self): """Load Devices data from the remote Nautobot instance.""" @@ -657,9 +660,11 @@ def load_devices(self): pk=device["id"], ) self.add(device) + self.job.logger.debug(f"Loaded {device} from remote Nautobot instance") def load_interfaces(self): """Load Interfaces data from the remote Nautobot instance.""" + self.job.logger.info("Pulling data from remote Nautobot instance for Interfaces.") for interface in self._get_api_data("api/dcim/interfaces/?depth=3"): try: dev = self.get( @@ -685,6 +690,9 @@ def load_interfaces(self): pk=interface["id"], ) self.add(new_interface) + self.job.logger.debug( + f"Loaded {new_interface} for {interface['device']['name']} from remote Nautobot instance" + ) dev.add_child(new_interface) except ObjectNotFound: self.job.logger.warning(f"Unable to find Device {interface['device']['name']} loaded.") @@ -852,7 +860,6 @@ def load_target_adapter(self): """Method to instantiate and load the TARGET adapter into `self.target_adapter`.""" self.target_adapter = NautobotLocal(job=self, sync=self.sync) self.target_adapter.load() - self.logger.info(f"Found {self.target_adapter.count('region')} regions") def lookup_object(self, model_name, unique_id): """Look up a Nautobot object based on the DiffSync model name and unique ID.""" From a052217390c7345e4d1016b27da83c05ab71e41f Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Thu, 30 May 2024 14:28:38 -0500 Subject: [PATCH 133/229] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Change=20limit=20?= =?UTF-8?q?from=200=20to=20200=20to=20enable=20pulling=20of=20larger=20obj?= =?UTF-8?q?ect=20sets=20like=20Interfaces=20and=20IPAddresses.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/jobs/examples.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nautobot_ssot/jobs/examples.py b/nautobot_ssot/jobs/examples.py index 975e28b7b..122227065 100644 --- a/nautobot_ssot/jobs/examples.py +++ b/nautobot_ssot/jobs/examples.py @@ -477,10 +477,10 @@ def __init__(self, *args, url=None, token=None, job=None, **kwargs): def _get_api_data(self, url_path: str) -> Mapping: """Returns data from a url_path using pagination.""" - data = requests.get(f"{self.url}/{url_path}", headers=self.headers, params={"limit": 0}, timeout=600).json() + data = requests.get(f"{self.url}/{url_path}", headers=self.headers, params={"limit": 200}, timeout=60).json() result_data = data["results"] while data["next"]: - data = requests.get(data["next"], headers=self.headers, params={"limit": 0}, timeout=600).json() + data = requests.get(data["next"], headers=self.headers, params={"limit": 200}, timeout=60).json() result_data.extend(data["results"]) return result_data From 825ec087ef689096bdf84664932d4afc29200a9d Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Mon, 3 Jun 2024 08:56:20 -0500 Subject: [PATCH 134/229] =?UTF-8?q?feat:=20=E2=9C=A8=20Add=20StatusModel?= =?UTF-8?q?=20to=20sync=20Statuses=20for=20Prefix=20and=20IPAddresses.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/jobs/examples.py | 36 +++++++++++++++++++++++++++++++++- 1 file changed, 35 insertions(+), 1 deletion(-) diff --git a/nautobot_ssot/jobs/examples.py b/nautobot_ssot/jobs/examples.py index 122227065..30f5167f0 100644 --- a/nautobot_ssot/jobs/examples.py +++ b/nautobot_ssot/jobs/examples.py @@ -12,7 +12,7 @@ from nautobot.dcim.models import Device, 
DeviceType, Interface, Location, LocationType, Manufacturer, Platform from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.jobs import ObjectVar, StringVar -from nautobot.extras.models import ExternalIntegration, Role +from nautobot.extras.models import ExternalIntegration, Role, Status from nautobot.ipam.models import IPAddress, Namespace, Prefix from nautobot.tenancy.models import Tenant @@ -99,6 +99,23 @@ class RoleModel(NautobotModel): pk: Optional[UUID] +class StatusModel(NautobotModel): + """Shared data model representing a Status in either of the local or remote Nautobot instances.""" + + # Metadata about this model + _model = Status + _modelname = "status" + _identifiers = ("name",) + _attributes = ("content_types", "color") + + name: str + color: str + content_types: List[ContentTypeDict] = [] + + # Not in _attributes or _identifiers, hence not included in diff calculations + pk: Optional[UUID] + + class NamespaceModel(NautobotModel): """Shared data model representing a Namespace in either of the local or remote Nautobot instance.""" @@ -437,12 +454,14 @@ class NautobotRemote(DiffSync): device_type = DeviceTypeModel platform = PlatformModel role = RoleModel + status = StatusModel device = DeviceModel interface = InterfaceModel # Top-level class labels, i.e. those classes that are handled directly rather than as children of other models top_level = [ "tenant", + "status", "locationtype", "location", "manufacturer", @@ -486,6 +505,7 @@ def _get_api_data(self, url_path: str) -> Mapping: def load(self): """Load data from the remote Nautobot instance.""" + self.load_statuses() self.load_location_types() self.load_locations() self.load_roles() @@ -543,6 +563,18 @@ def load_roles(self): ) self.add(role) + def load_statuses(self): + """Load Statuses data from the remote Nautobot instance.""" + for status_entry in self._get_api_data("api/extras/statuses/?depth=1"): + content_types = self.get_content_types(status_entry) + status = self.status( + name=status_entry["name"], + color=status_entry["color"], + content_types=content_types, + pk=status_entry["id"], + ) + self.add(status) + def load_tenants(self): """Load Tenants data from the remote Nautobot instance.""" for tenant_entry in self._get_api_data("api/tenancy/tenants/?depth=1"): @@ -749,12 +781,14 @@ class NautobotLocal(NautobotAdapter): device_type = DeviceTypeModel platform = PlatformModel role = RoleModel + status = StatusModel device = DeviceModel interface = InterfaceModel # Top-level class labels, i.e. 
those classes that are handled directly rather than as children of other models top_level = [ "tenant", + "status", "locationtype", "location", "manufacturer", From ef20cbd5c72f5d83dad10c0290a79bf4ca43abe5 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Mon, 3 Jun 2024 08:57:26 -0500 Subject: [PATCH 135/229] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Add=20parent=20pr?= =?UTF-8?q?efix=5Flength=20and=20Namespace=20for=20IPAddress=20class=20att?= =?UTF-8?q?ributes.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/jobs/examples.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/nautobot_ssot/jobs/examples.py b/nautobot_ssot/jobs/examples.py index 30f5167f0..0a10ce0ec 100644 --- a/nautobot_ssot/jobs/examples.py +++ b/nautobot_ssot/jobs/examples.py @@ -160,9 +160,8 @@ class IPAddressModel(NautobotModel): # Metadata about this model _model = IPAddress _modelname = "ipaddress" - _identifiers = ("host", "parent__network") + _identifiers = ("host", "mask_length", "parent__network", "parent__prefix_length", "parent__namespace__name") _attributes = ( - "mask_length", "status__name", "ip_version", "tenant__name", @@ -172,6 +171,8 @@ class IPAddressModel(NautobotModel): host: str mask_length: int parent__network: str + parent__prefix_length: int + parent__namespace__name: str status__name: str ip_version: int tenant__name: Optional[str] @@ -616,6 +617,8 @@ def load_ipaddresses(self): host=ipaddr_entry["host"], mask_length=ipaddr_entry["mask_length"], parent__network=ipaddr_entry["parent"]["network"], + parent__prefix_length=ipaddr_entry["parent"]["prefix_length"], + parent__namespace__name=ipaddr_entry["parent"]["namespace"]["name"], status__name=ipaddr_entry["status"]["name"], ip_version=ipaddr_entry["ip_version"], tenant__name=ipaddr_entry["tenant"]["name"] if ipaddr_entry.get("tenant") else "", From e09414aa1fc17c7aac9ed088b0ff80a103f64040 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Wed, 12 Jun 2024 22:05:24 -0500 Subject: [PATCH 136/229] =?UTF-8?q?feat:=20=E2=9C=A8=20Add=20support=20for?= =?UTF-8?q?=20Locations=20on=20PrefixModel?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/jobs/examples.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/nautobot_ssot/jobs/examples.py b/nautobot_ssot/jobs/examples.py index 0a10ce0ec..c86dce7ac 100644 --- a/nautobot_ssot/jobs/examples.py +++ b/nautobot_ssot/jobs/examples.py @@ -3,7 +3,7 @@ # Skip colon check for multiple statements on one line. # flake8: noqa: E701 -from typing import Optional, Mapping, List +from typing import Optional, Mapping, List, TypedDict from uuid import UUID from django.contrib.contenttypes.models import ContentType from django.templatetags.static import static @@ -53,6 +53,13 @@ class LocationTypeModel(NautobotModel): pk: Optional[UUID] +class LocationDict(TypedDict): + """This typed dict is for M2M Locations.""" + + name: str + location_type__name: str + + class LocationModel(NautobotModel): """Shared data model representing a Location in either of the local or remote Nautobot instances.""" @@ -140,7 +147,7 @@ class PrefixModel(NautobotModel): _modelname = "prefix" _identifiers = ("network", "prefix_length", "tenant__name") # To keep this example simple, we don't include **all** attributes of a Prefix here. But you could! 
- _attributes = ("description", "namespace__name", "status__name") + _attributes = ("description", "namespace__name", "status__name", "locations") # Data type declarations for all identifiers and attributes network: str @@ -150,6 +157,8 @@ class PrefixModel(NautobotModel): status__name: str description: str + locations: Optional[List[LocationDict]] = [] + # Not in _attributes or _identifiers, hence not included in diff calculations pk: Optional[UUID] @@ -603,6 +612,10 @@ def load_prefixes(self): prefix_length=prefix_entry["prefix_length"], namespace__name=prefix_entry["namespace"]["name"], description=prefix_entry["description"], + locations=[ + {"name": x["name"], "location_type__name": x["location_type"]["name"]} + for x in prefix_entry["locations"] + ], status__name=prefix_entry["status"]["name"] if prefix_entry["status"].get("name") else "Active", tenant__name=prefix_entry["tenant"]["name"] if prefix_entry["tenant"] else "", pk=prefix_entry["id"], From de2f24ede6d4d6f8298b2ad560844c9f8c14f6ca Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Thu, 13 Jun 2024 08:06:55 -0500 Subject: [PATCH 137/229] =?UTF-8?q?refactor:=20=F0=9F=94=A5=20Remove=20pk?= =?UTF-8?q?=20attribute=20from=20models=20as=20it's=20already=20on=20Nauto?= =?UTF-8?q?botModel.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/jobs/examples.py | 28 ---------------------------- 1 file changed, 28 deletions(-) diff --git a/nautobot_ssot/jobs/examples.py b/nautobot_ssot/jobs/examples.py index c86dce7ac..9d9a9efa2 100644 --- a/nautobot_ssot/jobs/examples.py +++ b/nautobot_ssot/jobs/examples.py @@ -49,9 +49,6 @@ class LocationTypeModel(NautobotModel): parent__name: Optional[str] content_types: List[ContentTypeDict] = [] - # Not in _attributes or _identifiers, hence not included in diff calculations - pk: Optional[UUID] - class LocationDict(TypedDict): """This typed dict is for M2M Locations.""" @@ -86,9 +83,6 @@ class LocationModel(NautobotModel): tenant__name: Optional[str] description: str - # Not in _attributes or _identifiers, hence not included in diff calculations - pk: Optional[UUID] - class RoleModel(NautobotModel): """Shared data model representing a Role in either of the local or remote Nautobot instances.""" @@ -102,9 +96,6 @@ class RoleModel(NautobotModel): name: str content_types: List[ContentTypeDict] = [] - # Not in _attributes or _identifiers, hence not included in diff calculations - pk: Optional[UUID] - class StatusModel(NautobotModel): """Shared data model representing a Status in either of the local or remote Nautobot instances.""" @@ -119,9 +110,6 @@ class StatusModel(NautobotModel): color: str content_types: List[ContentTypeDict] = [] - # Not in _attributes or _identifiers, hence not included in diff calculations - pk: Optional[UUID] - class NamespaceModel(NautobotModel): """Shared data model representing a Namespace in either of the local or remote Nautobot instance.""" @@ -135,9 +123,6 @@ class NamespaceModel(NautobotModel): name: str description: Optional[str] = "" - # Not in _attributes or _identifiers, hence not included in diff calculations - pk: Optional[UUID] - class PrefixModel(NautobotModel): """Shared data model representing a Prefix in either of the local or remote Nautobot instances.""" @@ -159,9 +144,6 @@ class PrefixModel(NautobotModel): locations: Optional[List[LocationDict]] = [] - # Not in _attributes or _identifiers, hence not included in diff calculations - pk: Optional[UUID] - class 
IPAddressModel(NautobotModel): """Shared data model representing an IPAddress in either of the local or remote Nautobot instances.""" @@ -186,8 +168,6 @@ class IPAddressModel(NautobotModel): ip_version: int tenant__name: Optional[str] - pk: Optional[UUID] - class TenantModel(NautobotModel): """Shared data model representing a Tenant in either of the local or remote Nautobot instances.""" @@ -201,8 +181,6 @@ class TenantModel(NautobotModel): name: str prefixes: List[PrefixModel] = [] - pk: Optional[UUID] - class DeviceTypeModel(NautobotModel): """Shared data model representing a DeviceType in either of the local or remote Nautobot instances.""" @@ -218,9 +196,6 @@ class DeviceTypeModel(NautobotModel): u_height: int is_full_depth: bool - # Not in _attributes or _identifiers, hence not included in diff calculations - pk: Optional[UUID] - class ManufacturerModel(NautobotModel): """Shared data model representing a Manufacturer in either of the local or remote Nautobot instances.""" @@ -235,9 +210,6 @@ class ManufacturerModel(NautobotModel): description: str device_types: List[DeviceTypeModel] = [] - # Not in _attributes or _identifiers, hence not included in diff calculations - pk: Optional[UUID] - class PlatformModel(NautobotModel): """Shared data model representing a Platform in either of the local or remote Nautobot instances.""" From c9c4d92bdeb6781a6d4c3698eecdceb095f3514d Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Thu, 13 Jun 2024 08:08:24 -0500 Subject: [PATCH 138/229] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Remove=20Optional?= =?UTF-8?q?=20from=20locations=20attribute=20as=20not=20required=20with=20?= =?UTF-8?q?default[].?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/jobs/examples.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/jobs/examples.py b/nautobot_ssot/jobs/examples.py index 9d9a9efa2..a91afe91c 100644 --- a/nautobot_ssot/jobs/examples.py +++ b/nautobot_ssot/jobs/examples.py @@ -142,7 +142,7 @@ class PrefixModel(NautobotModel): status__name: str description: str - locations: Optional[List[LocationDict]] = [] + locations: List[LocationDict] = [] class IPAddressModel(NautobotModel): From 19bd2419ff6f645faec479f59cb332211f80b9be Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Thu, 13 Jun 2024 08:21:05 -0500 Subject: [PATCH 139/229] =?UTF-8?q?test:=20=F0=9F=94=A5=20Remove=20unused?= =?UTF-8?q?=20import?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/jobs/examples.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nautobot_ssot/jobs/examples.py b/nautobot_ssot/jobs/examples.py index a91afe91c..ac8a941df 100644 --- a/nautobot_ssot/jobs/examples.py +++ b/nautobot_ssot/jobs/examples.py @@ -4,7 +4,6 @@ # flake8: noqa: E701 from typing import Optional, Mapping, List, TypedDict -from uuid import UUID from django.contrib.contenttypes.models import ContentType from django.templatetags.static import static from django.urls import reverse From 48f7e1bf9ebd98ec1f8259b46e0c01c6922a3732 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Thu, 13 Jun 2024 08:33:12 -0500 Subject: [PATCH 140/229] =?UTF-8?q?test:=20=F0=9F=9A=A8=20Remove=20invalid?= =?UTF-8?q?=20pylint=20disables=20that=20were=20place=20as=20rule=20has=20?= =?UTF-8?q?been=20fixed.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit --- nautobot_ssot/tables.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/nautobot_ssot/tables.py b/nautobot_ssot/tables.py index eb953259f..117a0f0d3 100644 --- a/nautobot_ssot/tables.py +++ b/nautobot_ssot/tables.py @@ -36,7 +36,7 @@ MESSAGE_SPAN = """{% if record.message %}{{ record.message }}{% else %}—{% endif %}""" -class DashboardTable(BaseTable): # pylint: disable=nb-sub-class-name +class DashboardTable(BaseTable): """Abbreviated version of SyncTable, for use with the dashboard.""" start_time = DateTimeColumn(linkify=True, short=True) @@ -49,7 +49,7 @@ class Meta(BaseTable.Meta): """Metaclass attributes of DashboardTable.""" model = Sync - fields = ["source", "target", "start_time", "status", "dry_run"] # pylint: disable=nb-use-fields-all + fields = ["source", "target", "start_time", "status", "dry_run"] order_by = ["-start_time"] @@ -105,7 +105,7 @@ class Meta(BaseTable.Meta): """Metaclass attributes of SyncTable.""" model = Sync - fields = ( # pylint: disable=nb-use-fields-all + fields = ( "pk", "source", "target", @@ -138,7 +138,7 @@ class Meta(BaseTable.Meta): order_by = ("-start_time",) -class SyncTableSingleSourceOrTarget(SyncTable): # pylint: disable=nb-no-model-found +class SyncTableSingleSourceOrTarget(SyncTable): """Subclass of SyncTable with fewer default columns.""" class Meta(SyncTable.Meta): @@ -187,7 +187,7 @@ class Meta(BaseTable.Meta): """Metaclass attributes of SyncLogEntryTable.""" model = SyncLogEntry - fields = ( # pylint: disable=nb-use-fields-all + fields = ( "pk", "timestamp", "sync", From 3f208034f5a09a16794bc415945c776ca94f5365 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Thu, 13 Jun 2024 08:45:19 -0500 Subject: [PATCH 141/229] =?UTF-8?q?docs:=20=F0=9F=93=9D=20Add=20changelog?= =?UTF-8?q?=20snippets.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- changes/469.added | 1 + changes/469.fixed | 1 + 2 files changed, 2 insertions(+) create mode 100644 changes/469.added create mode 100644 changes/469.fixed diff --git a/changes/469.added b/changes/469.added new file mode 100644 index 000000000..3d94e1181 --- /dev/null +++ b/changes/469.added @@ -0,0 +1 @@ +Added more models for import in Example Jobs. \ No newline at end of file diff --git a/changes/469.fixed b/changes/469.fixed new file mode 100644 index 000000000..c39bac900 --- /dev/null +++ b/changes/469.fixed @@ -0,0 +1 @@ +Removed invalid pylint disables as disabled rule has been corrected. \ No newline at end of file From 8aaf32a73c0deff15bab27bf43efa460ee0f245f Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Thu, 13 Jun 2024 09:13:59 -0500 Subject: [PATCH 142/229] =?UTF-8?q?revert:=20=E2=8F=AA=EF=B8=8F=20Revert?= =?UTF-8?q?=20pylint=20changes?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- changes/469.fixed | 1 - nautobot_ssot/tables.py | 10 +++++----- 2 files changed, 5 insertions(+), 6 deletions(-) delete mode 100644 changes/469.fixed diff --git a/changes/469.fixed b/changes/469.fixed deleted file mode 100644 index c39bac900..000000000 --- a/changes/469.fixed +++ /dev/null @@ -1 +0,0 @@ -Removed invalid pylint disables as disabled rule has been corrected. 
\ No newline at end of file diff --git a/nautobot_ssot/tables.py b/nautobot_ssot/tables.py index 117a0f0d3..eb953259f 100644 --- a/nautobot_ssot/tables.py +++ b/nautobot_ssot/tables.py @@ -36,7 +36,7 @@ MESSAGE_SPAN = """{% if record.message %}{{ record.message }}{% else %}—{% endif %}""" -class DashboardTable(BaseTable): +class DashboardTable(BaseTable): # pylint: disable=nb-sub-class-name """Abbreviated version of SyncTable, for use with the dashboard.""" start_time = DateTimeColumn(linkify=True, short=True) @@ -49,7 +49,7 @@ class Meta(BaseTable.Meta): """Metaclass attributes of DashboardTable.""" model = Sync - fields = ["source", "target", "start_time", "status", "dry_run"] + fields = ["source", "target", "start_time", "status", "dry_run"] # pylint: disable=nb-use-fields-all order_by = ["-start_time"] @@ -105,7 +105,7 @@ class Meta(BaseTable.Meta): """Metaclass attributes of SyncTable.""" model = Sync - fields = ( + fields = ( # pylint: disable=nb-use-fields-all "pk", "source", "target", @@ -138,7 +138,7 @@ class Meta(BaseTable.Meta): order_by = ("-start_time",) -class SyncTableSingleSourceOrTarget(SyncTable): +class SyncTableSingleSourceOrTarget(SyncTable): # pylint: disable=nb-no-model-found """Subclass of SyncTable with fewer default columns.""" class Meta(SyncTable.Meta): @@ -187,7 +187,7 @@ class Meta(BaseTable.Meta): """Metaclass attributes of SyncLogEntryTable.""" model = SyncLogEntry - fields = ( + fields = ( # pylint: disable=nb-use-fields-all "pk", "timestamp", "sync", From 8f0ec6113504ea1505b6c9fc8b5b0af0be8da147 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Thu, 13 Jun 2024 11:12:16 -0500 Subject: [PATCH 143/229] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Correct=20import?= =?UTF-8?q?=20for=20TypedDict=20to=20account=20for=20python=20versions.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/jobs/examples.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/nautobot_ssot/jobs/examples.py b/nautobot_ssot/jobs/examples.py index ac8a941df..423f471d1 100644 --- a/nautobot_ssot/jobs/examples.py +++ b/nautobot_ssot/jobs/examples.py @@ -3,7 +3,12 @@ # Skip colon check for multiple statements on one line. # flake8: noqa: E701 -from typing import Optional, Mapping, List, TypedDict +try: + from typing_extensions import TypedDict # Python<3.9 +except ImportError: + from typing import TypedDict # Python>=3.9 + +from typing import Optional, Mapping, List from django.contrib.contenttypes.models import ContentType from django.templatetags.static import static from django.urls import reverse From 965862f4ce6cee08d74eec63b36884f5f3c74440 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 13 Jun 2024 19:09:47 +0100 Subject: [PATCH 144/229] Add choices for deletable models. 
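The ChoiceSet subclasses added below follow the standard Nautobot pattern: class-level constants for the stored values plus a CHOICES tuple of (value, label) pairs. A hedged sketch of how such a set is typically consumed; the form name and fields here are illustrative, since the real SSOTInfobloxConfig form is outside this excerpt:

    from django import forms

    from nautobot_ssot.integrations.infoblox.choices import (
        InfobloxDeletableModelChoices,
        NautobotDeletableModelChoices,
    )


    class DeletableModelsForm(forms.Form):
        """Illustrative form letting users pick which object types a sync may delete."""

        infoblox_deletable_models = forms.MultipleChoiceField(
            required=False, choices=InfobloxDeletableModelChoices.CHOICES
        )
        nautobot_deletable_models = forms.MultipleChoiceField(
            required=False, choices=NautobotDeletableModelChoices.CHOICES
        )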
--- .../integrations/infoblox/choices.py | 36 +++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/nautobot_ssot/integrations/infoblox/choices.py b/nautobot_ssot/integrations/infoblox/choices.py index c6a670c8f..30e3f5b07 100644 --- a/nautobot_ssot/integrations/infoblox/choices.py +++ b/nautobot_ssot/integrations/infoblox/choices.py @@ -42,3 +42,39 @@ class DNSRecordTypeChoices(ChoiceSet): (A_RECORD, "Create A record"), (A_AND_PTR_RECORD, "Create A and PTR records"), ) + + +class InfobloxDeletableModelChoices(ChoiceSet): + """Choiceset used by SSOTInfobloxConfig.""" + + DNS_A_RECORD = "dns-a-record" + DNS_HOST_RECORD = "dns-host-record" + DNS_PTR_RECORD = "dns-ptr-record" + FIXED_ADDRESS = "fixed-address" + + CHOICES = ( + (DNS_A_RECORD, "DNS A Record"), + (DNS_HOST_RECORD, "DNS Host Record"), + (DNS_PTR_RECORD, "DNS PTR Record"), + (FIXED_ADDRESS, "Fixed Address"), + ) + + +class NautobotDeletableModelChoices(ChoiceSet): + """Choiceset used by SSOTInfobloxConfig.""" + + DNS_A_RECORD = "dns-a-record" + DNS_HOST_RECORD = "dns-host-record" + DNS_PTR_RECORD = "dns-ptr-record" + IP_ADDRESS = "ip-address" + VLAN = "vlan" + VLAN_GROUP = "vlan-group" + + CHOICES = ( + (DNS_A_RECORD, "DNS A Record"), + (DNS_HOST_RECORD, "DNS Host Record"), + (DNS_PTR_RECORD, "DNS PTR Record"), + (IP_ADDRESS, "IP Address"), + (VLAN, "VLAN"), + (VLAN_GROUP, "VLAN Group"), + ) From 0b4eecc731986e5f4dc679a5226bf2b92e39e1d1 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 13 Jun 2024 19:10:35 +0100 Subject: [PATCH 145/229] Load A, PTR, and Host records into their own models. --- .../infoblox/diffsync/adapters/infoblox.py | 162 ++++++++++++++---- 1 file changed, 125 insertions(+), 37 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py index ce819b267..e8cae6842 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py @@ -8,7 +8,11 @@ from diffsync.exceptions import ObjectAlreadyExists from nautobot.extras.plugins.exceptions import PluginImproperlyConfigured +from nautobot_ssot.integrations.infoblox.choices import FixedAddressTypeChoices from nautobot_ssot.integrations.infoblox.diffsync.models.infoblox import ( + InfobloxDnsHostRecord, + InfobloxDnsARecord, + InfobloxDnsPTRRecord, InfobloxIPAddress, InfobloxNamespace, InfobloxNetwork, @@ -20,7 +24,6 @@ build_vlan_map, get_ext_attr_dict, map_network_view_to_namespace, - validate_dns_name, ) @@ -36,8 +39,20 @@ class InfobloxAdapter(DiffSync): ipaddress = InfobloxIPAddress vlangroup = InfobloxVLANView vlan = InfobloxVLAN - - top_level = ["namespace", "vlangroup", "vlan", "prefix", "ipaddress"] + dnshostrecord = InfobloxDnsHostRecord + dnsarecord = InfobloxDnsARecord + dnsptrrecord = InfobloxDnsPTRRecord + + top_level = [ + "namespace", + "vlangroup", + "vlan", + "prefix", + "ipaddress", + "dnshostrecord", + "dnsarecord", + "dnsptrrecord", + ] def __init__(self, *args, job=None, sync=None, conn, config, **kwargs): """Initialize Infoblox. 
@@ -211,16 +226,6 @@ def load_ipaddresses(self): # pylint: disable=too-many-branches,too-many-locals for _ip in ipaddrs: _, prefix_length = _ip["network"].split("/") network_view = _ip["network_view"] - dns_name = "" - # Record can have multiple names, if there is a DNS record attached we should use that name - for dns_name_candidate in _ip["names"]: - if not validate_dns_name( - infoblox_client=self.conn, dns_name=dns_name_candidate, network_view=network_view - ): - continue - dns_name = dns_name_candidate - break - namespace = map_network_view_to_namespace(value=network_view, direction="nv_to_ns") ip_ext_attrs = get_ext_attr_dict(extattrs=_ip.get("extattrs", {}), excluded_attrs=self.excluded_attrs) @@ -229,41 +234,28 @@ def load_ipaddresses(self): # pylint: disable=too-many-branches,too-many-locals prefix=_ip["network"], prefix_length=prefix_length, namespace=namespace, - dns_name=dns_name, status=self.conn.get_ipaddr_status(_ip), - ip_addr_type=self.conn.get_ipaddr_type(_ip), + description="", + ip_addr_type="host", ext_attrs={**default_ext_attrs, **ip_ext_attrs}, mac_address="" if not _ip["mac_address"] else _ip["mac_address"], - fixed_address_name="", fixed_address_comment="", ) # Record references to DNS Records linked to this IP Address. # Field `comment` in IP Address records can come from linked fixed address or DNS record. # We add extra logic to tell DNS record and fixed address comments apart. - # NOTE: We are assuming that Host/A/PTR comments are the same. - # If they're not, the first one found will be treated as the correct one. - dns_comment = "" for ref in _ip["objects"]: obj_type = ref.split("/")[0] if obj_type == "record:host": new_ip.has_host_record = True - new_ip.host_record_ref = ref - if not dns_comment: - host_record = self.conn.get_host_record_by_ref(ref) - dns_comment = host_record.get("comment", "") + host_record_ref = ref elif obj_type == "record:a": new_ip.has_a_record = True - new_ip.a_record_ref = ref - if not dns_comment: - a_record = self.conn.get_a_record_by_ref(ref) - dns_comment = a_record.get("comment", "") + a_record_ref = ref elif obj_type == "record:ptr": new_ip.has_ptr_record = True - new_ip.ptr_record_ref = ref - if not dns_comment: - ptr_record = self.conn.get_ptr_record_by_ref(ref) - dns_comment = ptr_record.get("comment", "") + ptr_record_ref = ref # We currently only support RESERVED and MAC_ADDRESS types for fixed address elif obj_type == "fixedaddress": if "RESERVATION" in _ip["types"]: @@ -275,15 +267,111 @@ def load_ipaddresses(self): # pylint: disable=too-many-branches,too-many-locals new_ip.has_fixed_address = True new_ip.fixed_address_ref = ref - new_ip.description = dns_comment - - # Fixed address name and comment values can differ from the DNS name and comment retrieved from the `names` array on the IP Address record. 
+            # We use Nautobot IP Address description for Infoblox Fixed Address name
             if new_ip.has_fixed_address:
                 fixed_address = self.conn.get_fixed_address_by_ref(new_ip.fixed_address_ref)
-                new_ip.fixed_address_name = fixed_address.get("name", "")
-                new_ip.fixed_address_comment = fixed_address.get("comment", "")
+                new_ip.description = fixed_address.get("name") or ""
+                new_ip.fixed_address_comment = fixed_address.get("comment") or ""
+
+            # Default type is `host` but fixed address records must be `dhcp`
+            if (
+                new_ip.has_fixed_address
+                and self.config.fixed_address_type != FixedAddressTypeChoices.DONT_CREATE_RECORD
+            ):
+                new_ip.ip_addr_type = "dhcp"
+
+            # Load individual DNS records
+            if new_ip.has_a_record:
+                self._load_dns_a_record_for_ip(ref=a_record_ref, ip_record=new_ip, namespace=namespace)
+            if new_ip.has_host_record:
+                self._load_dns_host_record_for_ip(ref=host_record_ref, ip_record=new_ip, namespace=namespace)
+            if new_ip.has_ptr_record:
+                self._load_dns_ptr_record_for_ip(ref=ptr_record_ref, ip_record=new_ip, namespace=namespace)
+
+            if new_ip.has_fixed_address or new_ip.has_a_record or new_ip.has_ptr_record:
+                self.add(new_ip)
+
+    def _load_dns_host_record_for_ip(self, ref: str, ip_record: object, namespace: str):
+        """Load the DNS Host record.
+
+        Args:
+            ref (str): Host record reference
+            ip_record (object): Parent IP Address record
+            namespace (str): Namespace of this record
+        """
+        host_record = self.conn.get_host_record_by_ref(ref)
+        record_ext_attrs = get_ext_attr_dict(
+            extattrs=host_record.get("extattrs", {}), excluded_attrs=self.excluded_attrs
+        )
+
+        new_host_record = self.dnshostrecord(
+            address=ip_record.address,
+            prefix=ip_record.prefix,
+            prefix_length=ip_record.prefix_length,
+            namespace=namespace,
+            dns_name=host_record["name"],
+            ip_addr_type=ip_record.ip_addr_type,
+            description=host_record.get("comment") or "",
+            status=ip_record.status,
+            ext_attrs=record_ext_attrs,
+            ref=ref,
+        )
+
+        self.add(new_host_record)
+
+    def _load_dns_a_record_for_ip(self, ref: str, ip_record: object, namespace: str):
+        """Load the DNS A record.
+
+        Args:
+            ref (str): A record reference
+            ip_record (object): Parent IP Address record
+            namespace (str): Namespace of this record
+        """
+        a_record = self.conn.get_a_record_by_ref(ref)
+        record_ext_attrs = get_ext_attr_dict(extattrs=a_record.get("extattrs", {}), excluded_attrs=self.excluded_attrs)
+
+        new_a_record = self.dnsarecord(
+            address=a_record["ipv4addr"],
+            prefix=ip_record.prefix,
+            prefix_length=ip_record.prefix_length,
+            namespace=namespace,
+            dns_name=a_record["name"],
+            ip_addr_type=ip_record.ip_addr_type,
+            description=a_record.get("comment") or "",
+            status=ip_record.status,
+            ext_attrs=record_ext_attrs,
+            ref=ref,
+        )
+
+        self.add(new_a_record)
+
+    def _load_dns_ptr_record_for_ip(self, ref: str, ip_record: object, namespace: str):
+        """Load the DNS PTR record.
+
+        Args:
+            ref (str): PTR record reference
+            ip_record (object): Parent IP Address record
+            namespace (str): Namespace of this record
+        """
+        ptr_record = self.conn.get_ptr_record_by_ref(ref)
+        record_ext_attrs = get_ext_attr_dict(
+            extattrs=ptr_record.get("extattrs", {}), excluded_attrs=self.excluded_attrs
+        )
+
+        new_ptr_record = self.dnsptrrecord(
+            address=ptr_record["ipv4addr"],
+            prefix=ip_record.prefix,
+            prefix_length=ip_record.prefix_length,
+            namespace=namespace,
+            dns_name=ptr_record["ptrdname"],
+            ip_addr_type=ip_record.ip_addr_type,
+            description=ptr_record.get("comment") or "",
+            status=ip_record.status,
+            ext_attrs=record_ext_attrs,
+            ref=ref,
+        )
+
+        self.add(new_ptr_record)
 
     def load_vlanviews(self):
         """Load InfobloxVLANView DiffSync model."""

From fcef9f25eceace9d40aa14785ebe9e5eae628081 Mon Sep 17 00:00:00 2001
From: Przemek Rogala
Date: Thu, 13 Jun 2024 19:10:55 +0100
Subject: [PATCH 146/229] Load A, PTR, and Host records into their own models.

---
 .../infoblox/diffsync/adapters/nautobot.py    | 103 ++++++++++++++++--
 1 file changed, 92 insertions(+), 11 deletions(-)

diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py
index 7bf3c203a..95d857070 100644
--- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py
+++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py
@@ -17,6 +17,9 @@
 from nautobot_ssot.integrations.infoblox.choices import DNSRecordTypeChoices, FixedAddressTypeChoices
 from nautobot_ssot.integrations.infoblox.constant import TAG_COLOR
 from nautobot_ssot.integrations.infoblox.diffsync.models import (
+    NautobotDnsARecord,
+    NautobotDnsHostRecord,
+    NautobotDnsPTRRecord,
     NautobotIPAddress,
     NautobotNamespace,
     NautobotNetwork,
@@ -102,8 +105,11 @@ class NautobotAdapter(NautobotMixin, DiffSync):  # pylint: disable=too-many-inst
     ipaddress = NautobotIPAddress
     vlangroup = NautobotVlanGroup
     vlan = NautobotVlan
+    dnshostrecord = NautobotDnsHostRecord
+    dnsarecord = NautobotDnsARecord
+    dnsptrrecord = NautobotDnsPTRRecord
 
-    top_level = ["namespace", "vlangroup", "vlan", "prefix", "ipaddress"]
+    top_level = ["namespace", "vlangroup", "vlan", "prefix", "ipaddress", "dnshostrecord", "dnsarecord", "dnsptrrecord"]
 
     status_map = {}
     location_map = {}
@@ -336,14 +342,11 @@ def load_ipaddresses(
             elif self.config.fixed_address_type == FixedAddressTypeChoices.RESERVED:
                 has_fixed_address = True
 
-            # Description translates to comment for DNS records only.
-            # If we don't have DNS name, or we don't create DNS records, then we set description to an empty string.
- if self.config.dns_record_type == DNSRecordTypeChoices.DONT_CREATE_RECORD: - description = "" - elif self.config.dns_record_type != DNSRecordTypeChoices.DONT_CREATE_RECORD and not ipaddr.dns_name: - description = "" - else: + # Description is used to derive name of the fixed record + if self.config.fixed_address_type != FixedAddressTypeChoices.DONT_CREATE_RECORD: description = ipaddr.description + else: + description = "" custom_fields = get_valid_custom_fields(ipaddr.custom_field_data, excluded_cfs=self.excluded_cfs) _ip = self.ipaddress( @@ -353,14 +356,11 @@ def load_ipaddresses( status=ipaddr.status.name if ipaddr.status else None, ip_addr_type=ipaddr.type, prefix_length=prefix.prefix_length if prefix else ipaddr.prefix_length, - dns_name=ipaddr.dns_name, description=description, ext_attrs={**default_cfs, **custom_fields}, mac_address=mac_address, pk=ipaddr.id, has_fixed_address=has_fixed_address, - # Fixed address name comes from Nautobot's IP Address `description` - fixed_address_name=ipaddr.description if has_fixed_address else "", # Only set fixed address comment if we create fixed addresses. fixed_address_comment=( ipaddr.custom_field_data.get("fixed_address_comment") or "" if has_fixed_address else "" @@ -372,17 +372,98 @@ def load_ipaddresses( if ipaddr.dns_name: if self.config.dns_record_type == DNSRecordTypeChoices.HOST_RECORD: _ip.has_host_record = True + self._load_dns_host_record_for_ip( + ip_record=_ip, dns_name=ipaddr.dns_name, cfs=ipaddr.custom_field_data + ) elif self.config.dns_record_type == DNSRecordTypeChoices.A_RECORD: _ip.has_a_record = True + self._load_dns_a_record_for_ip( + ip_record=_ip, dns_name=ipaddr.dns_name, cfs=ipaddr.custom_field_data + ) elif self.config.dns_record_type == DNSRecordTypeChoices.A_AND_PTR_RECORD: _ip.has_a_record = True _ip.has_ptr_record = True + self._load_dns_ptr_record_for_ip( + ip_record=_ip, dns_name=ipaddr.dns_name, cfs=ipaddr.custom_field_data + ) + self._load_dns_a_record_for_ip( + ip_record=_ip, dns_name=ipaddr.dns_name, cfs=ipaddr.custom_field_data + ) try: self.add(_ip) except ObjectAlreadyExists: self.job.logger.warning(f"Duplicate IP Address detected: {addr}.") + def _load_dns_host_record_for_ip(self, ip_record: NautobotIPAddress, dns_name: str, cfs: dict): + """Load the DNS Host record. + + Args: + ip_record (NautobotIPAddress): Parent IP Address record + dns_name (str): DNS Name + cfs (dict): Custom fields + """ + new_host_record = self.dnshostrecord( + address=ip_record.address, + prefix=ip_record.prefix, + prefix_length=ip_record.prefix_length, + namespace=ip_record.namespace, + dns_name=dns_name, + ip_addr_type=ip_record.ip_addr_type, + description=cfs.get("dns_host_record_comment") or "", + status=ip_record.status, + ext_attrs=ip_record.ext_attrs, + pk=ip_record.pk, + ) + + self.add(new_host_record) + + def _load_dns_a_record_for_ip(self, ip_record: NautobotIPAddress, dns_name: str, cfs: dict): + """Load the DNS A record. 
+ + Args: + ip_record (NautobotIPAddress): Parent IP Address record + dns_name (str): DNS Name + cfs (dict): Custom fields + """ + new_a_record = self.dnsarecord( + address=ip_record.address, + prefix=ip_record.prefix, + prefix_length=ip_record.prefix_length, + namespace=ip_record.namespace, + dns_name=dns_name, + ip_addr_type=ip_record.ip_addr_type, + description=cfs.get("dns_a_record_comment") or "", + status=ip_record.status, + ext_attrs=ip_record.ext_attrs, + pk=ip_record.pk, + ) + + self.add(new_a_record) + + def _load_dns_ptr_record_for_ip(self, ip_record: NautobotIPAddress, dns_name: str, cfs: dict): + """Load the DNS PTR record. + + Args: + ip_record (NautobotIPAddress): Parent IP Address record + dns_name (str): DNS Name + cfs (dict): Custom fields + """ + new_ptr_record = self.dnsptrrecord( + address=ip_record.address, + prefix=ip_record.prefix, + prefix_length=ip_record.prefix_length, + namespace=ip_record.namespace, + dns_name=dns_name, + ip_addr_type=ip_record.ip_addr_type, + description=cfs.get("dns_ptr_record_comment") or "", + status=ip_record.status, + ext_attrs=ip_record.ext_attrs, + pk=ip_record.pk, + ) + + self.add(new_ptr_record) + def load_vlangroups(self): """Load VLAN Groups from Nautobot.""" if self.job.debug: From 1b96ba42b89f67358f0b6ab84f1c53cffd61a78a Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 13 Jun 2024 19:11:26 +0100 Subject: [PATCH 147/229] Add A, PTR and Host models. --- .../infoblox/diffsync/models/__init__.py | 28 +++++++++++++++++-- 1 file changed, 26 insertions(+), 2 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py b/nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py index 143b17cbd..bbe411500 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py @@ -1,15 +1,39 @@ """Initialize models for Nautobot and Infoblox.""" -from .nautobot import NautobotNamespace, NautobotNetwork, NautobotIPAddress, NautobotVlanGroup, NautobotVlan -from .infoblox import InfobloxNamespace, InfobloxNetwork, InfobloxIPAddress, InfobloxVLANView, InfobloxVLAN +from .nautobot import ( + NautobotDnsARecord, + NautobotDnsHostRecord, + NautobotDnsPTRRecord, + NautobotNamespace, + NautobotNetwork, + NautobotIPAddress, + NautobotVlanGroup, + NautobotVlan, +) +from .infoblox import ( + InfobloxDnsARecord, + InfobloxDnsHostRecord, + InfobloxDnsPTRRecord, + InfobloxNamespace, + InfobloxNetwork, + InfobloxIPAddress, + InfobloxVLANView, + InfobloxVLAN, +) __all__ = [ + "NautobotDnsARecord", + "NautobotDnsHostRecord", + "NautobotDnsPTRRecord", "NautobotNamespace", "NautobotNetwork", "NautobotIPAddress", "NautobotVlanGroup", "NautobotVlan", + "InfobloxDnsARecord", + "InfobloxDnsHostRecord", + "InfobloxDnsPTRRecord", "InfobloxNamespace", "InfobloxNetwork", "InfobloxIPAddress", From a4f8026ba605d5eb323283355a745471b11e34d5 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 13 Jun 2024 19:12:05 +0100 Subject: [PATCH 148/229] Add A, PTR and Host models. 
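The three DNS record models added below are keyed the same way as the existing IPAddress model: the `_identifiers` tuple is the natural key DiffSync matches on across adapters, and `_attributes` is the state that gets diffed and synced. A small usage sketch with made-up values, relying on the diffsync library's standard model behavior:

from nautobot_ssot.integrations.infoblox.diffsync.models.base import DnsARecord

record = DnsARecord(
    address="10.0.0.5",
    prefix="10.0.0.0/24",
    prefix_length=24,
    namespace="Global",
    dns_name="host5.example.com",
    ip_addr_type="host",
)
# DiffSync pairs records of the same type by this identifier dict;
# only the fields listed in _attributes are compared and pushed on sync.
print(record.get_identifiers())
# -> {'address': '10.0.0.5', 'prefix': '10.0.0.0/24', 'prefix_length': 24, 'namespace': 'Global'}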
---
 .../infoblox/diffsync/models/base.py          | 88 +++++++++++++++++--
 1 file changed, 81 insertions(+), 7 deletions(-)

diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/base.py b/nautobot_ssot/integrations/infoblox/diffsync/models/base.py
index b67c53024..00125297a 100644
--- a/nautobot_ssot/integrations/infoblox/diffsync/models/base.py
+++ b/nautobot_ssot/integrations/infoblox/diffsync/models/base.py
@@ -70,7 +70,6 @@ class IPAddress(DiffSyncModel):
     _identifiers = ("address", "prefix", "prefix_length", "namespace")
     _attributes = (
         "description",
-        "dns_name",
         "status",
         "ip_addr_type",
         "ext_attrs",
@@ -79,12 +78,10 @@
         "has_ptr_record",
         "has_fixed_address",
         "mac_address",
-        "fixed_address_name",
         "fixed_address_comment",
     )
 
     address: str
-    dns_name: str
     prefix: str
     prefix_length: int
     namespace: str
@@ -97,12 +94,89 @@ class IPAddress(DiffSyncModel):
     has_ptr_record: bool = False
     has_fixed_address: bool = False
     mac_address: Optional[str]
-    fixed_address_name: Optional[str]
     fixed_address_comment: Optional[str]
 
     pk: Optional[uuid.UUID] = None
-    a_record_ref: Optional[str] = None
-    host_record_ref: Optional[str] = None
-    ptr_record_ref: Optional[str] = None
     fixed_address_ref: Optional[str] = None
     fixed_address_type: Optional[str] = None
+
+
+class DnsARecord(DiffSyncModel):
+    """DnsARecord model for DiffSync."""
+
+    _modelname = "dnsarecord"
+    _identifiers = ("address", "prefix", "prefix_length", "namespace")
+    _attributes = (
+        "dns_name",
+        "ip_addr_type",
+        "description",
+        "status",
+        "ext_attrs",
+    )
+
+    address: str
+    prefix: str
+    prefix_length: int
+    namespace: str
+    dns_name: str
+    ip_addr_type: str
+    description: Optional[str]
+    status: Optional[str]
+    ext_attrs: Optional[dict]
+
+    pk: Optional[uuid.UUID] = None
+    ref: Optional[str] = None
+
+
+class DnsHostRecord(DiffSyncModel):
+    """DnsHostRecord model for DiffSync."""
+
+    _modelname = "dnshostrecord"
+    _identifiers = ("address", "prefix", "prefix_length", "namespace")
+    _attributes = (
+        "dns_name",
+        "ip_addr_type",
+        "description",
+        "status",
+        "ext_attrs",
+    )
+
+    address: str
+    prefix: str
+    prefix_length: int
+    namespace: str
+    dns_name: str
+    ip_addr_type: str
+    description: Optional[str]
+    status: Optional[str]
+    ext_attrs: Optional[dict]
+
+    pk: Optional[uuid.UUID] = None
+    ref: Optional[str] = None
+
+
+class DnsPTRRecord(DiffSyncModel):
+    """DnsPTRRecord model for DiffSync."""
+
+    _modelname = "dnsptrrecord"
+    _identifiers = ("address", "prefix", "prefix_length", "namespace")
+    _attributes = (
+        "dns_name",
+        "ip_addr_type",
+        "description",
+        "status",
+        "ext_attrs",
+    )
+
+    address: str
+    prefix: str
+    prefix_length: int
+    namespace: str
+    dns_name: str
+    ip_addr_type: str
+    description: Optional[str]
+    status: Optional[str]
+    ext_attrs: Optional[dict]
+
+    pk: Optional[uuid.UUID] = None
+    ref: Optional[str] = None

From ddf9110c78f35ae114032442a9961c620de7ab82 Mon Sep 17 00:00:00 2001
From: Przemek Rogala
Date: Thu, 13 Jun 2024 19:12:46 +0100
Subject: [PATCH 149/229] Add A, PTR and Host models. Allow conditional deleting of some models.
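Every delete() added in this patch follows the same guard sequence: check the per-model opt-in list first, then check that the sync is configured to manage that record type at all, and only then call the Infoblox connection. A condensed sketch of the pattern, not the shipped code; the names match the diff below:

def delete(self):
    # Deletion must be explicitly opted into per model type.
    if InfobloxDeletableModelChoices.DNS_A_RECORD not in self.diffsync.config.infoblox_deletable_models:
        return super().delete()  # acknowledge the diff, leave Infoblox untouched
    # Only delete record types this sync is configured to manage.
    if self.diffsync.config.dns_record_type not in (
        DNSRecordTypeChoices.A_RECORD,
        DNSRecordTypeChoices.A_AND_PTR_RECORD,
    ):
        return super().delete()
    self.diffsync.conn.delete_a_record_by_ref(self.ref)
    return super().delete()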
--- .../infoblox/diffsync/models/infoblox.py | 695 +++++++++--------- 1 file changed, 367 insertions(+), 328 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py index e3e9140e3..2d9c4a63e 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py @@ -2,8 +2,21 @@ from requests.exceptions import HTTPError -from nautobot_ssot.integrations.infoblox.choices import DNSRecordTypeChoices, FixedAddressTypeChoices -from nautobot_ssot.integrations.infoblox.diffsync.models.base import IPAddress, Namespace, Network, Vlan, VlanView +from nautobot_ssot.integrations.infoblox.choices import ( + DNSRecordTypeChoices, + FixedAddressTypeChoices, + InfobloxDeletableModelChoices, +) +from nautobot_ssot.integrations.infoblox.diffsync.models.base import ( + DnsARecord, + DnsHostRecord, + DnsPTRRecord, + IPAddress, + Namespace, + Network, + Vlan, + VlanView, +) from nautobot_ssot.integrations.infoblox.utils.diffsync import map_network_view_to_namespace, validate_dns_name @@ -83,7 +96,7 @@ def create(cls, diffsync, ids, attrs): # pylint: disable=too-many-branches ip_address = ids["address"] mac_address = attrs.get("mac_address") has_fixed_address = attrs.get("has_fixed_address", False) - fixed_address_name = attrs.get("fixed_address_name") or "" + fixed_address_name = attrs.get("description") or "" fixed_address_comment = attrs.get("fixed_address_comment") or "" if diffsync.config.fixed_address_type == FixedAddressTypeChoices.RESERVED and has_fixed_address: @@ -125,15 +138,165 @@ def create(cls, diffsync, ids, attrs): # pylint: disable=too-many-branches fixed_address_comment, ) + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + + def update(self, attrs): # pylint: disable=too-many-branches + """Update IP Address object in Infoblox.""" + ids = self.get_identifiers() + inf_attrs = self.get_attrs() + ip_address = ids["address"] + network_view = map_network_view_to_namespace(value=ids["namespace"], direction="ns_to_nv") + + mac_address = attrs.get("mac_address") + fixed_address_name = attrs.get("description") or "" + fixed_address_comment = attrs.get("fixed_address_comment") or "" + + # Attempt update of a fixed address if Infoblox has one already + if inf_attrs.get("has_fixed_address"): + fa_update_data = {} + if "description" in attrs: + fa_update_data["name"] = fixed_address_name + if "fixed_address_comment" in attrs: + fa_update_data["comment"] = fixed_address_comment + + if ( + self.diffsync.config.fixed_address_type == FixedAddressTypeChoices.RESERVED + and self.fixed_address_type == "RESERVED" + and fa_update_data + ): + self.diffsync.conn.update_fixed_address(ref=self.fixed_address_ref, data=fa_update_data) + if self.diffsync.job.debug: + self.diffsync.job.logger.debug( + "Updated fixed address reservation, address: %s, network_view: %s, update data: %s", + ip_address, + network_view, + fa_update_data, + extra={"grouping": "update"}, + ) + elif ( + self.diffsync.config.fixed_address_type == FixedAddressTypeChoices.MAC_ADDRESS + and self.fixed_address_type == "MAC_ADDRESS" + and (fa_update_data or mac_address) + ): + if mac_address: + fa_update_data["mac"] = mac_address + self.diffsync.conn.update_fixed_address(ref=self.fixed_address_ref, data=fa_update_data) + if self.diffsync.job.debug: + self.diffsync.job.logger.debug( + "Updated fixed address with MAC, address: %s, network_view: %s, update data: %s", + 
ip_address, + network_view, + fa_update_data, + extra={"grouping": "update"}, + ) + # IP Address exists in Infoblox without Fixed Address object. Nautobot side is asking for Fixed Address so we need to create one. + elif ( + attrs.get("has_fixed_address") + and self.diffsync.config.fixed_address_type != FixedAddressTypeChoices.DONT_CREATE_RECORD + ): + if self.diffsync.config.fixed_address_type == FixedAddressTypeChoices.RESERVED: + self.diffsync.conn.create_fixed_address( + ip_address=ip_address, + name=fixed_address_name, + comment=fixed_address_comment, + match_client="RESERVED", + network_view=network_view, + ) + if self.diffsync.job.debug: + self.diffsync.job.logger.debug( + "Created fixed address reservation, address: %s, name: %s, network_view: %s, comment: %s", + ip_address, + fixed_address_name, + network_view, + fixed_address_comment, + extra={"grouping": "update"}, + ) + elif self.diffsync.config.fixed_address_type == FixedAddressTypeChoices.MAC_ADDRESS and mac_address: + self.diffsync.conn.create_fixed_address( + ip_address=ip_address, + name=fixed_address_name, + mac_address=mac_address, + comment=fixed_address_comment, + match_client="MAC_ADDRESS", + network_view=network_view, + ) + if self.diffsync.job.debug: + self.diffsync.job.logger.debug( + "Created fixed address with MAC, address: %s, name: %s, mac address: %s, network_view: %s, comment: %s", + ip_address, + fixed_address_name, + mac_address, + network_view, + fixed_address_comment, + extra={"grouping": "update"}, + ) + + return super().update(attrs) + + def delete(self): + """Delete Fixed Address in Infoblox.""" + if InfobloxDeletableModelChoices.FIXED_ADDRESS not in self.diffsync.config.infoblox_deletable_models: + return super().delete() + + if self.diffsync.config.fixed_address_type == FixedAddressTypeChoices.DONT_CREATE_RECORD: + return super().delete() + + network_view = map_network_view_to_namespace(value=self.namespace, direction="ns_to_nv") + self.diffsync.conn.delete_fixed_address_record_by_ref(self.fixed_address_ref) + self.diffsync.job.logger.info( + "Deleted Fixed Address record in Infoblox, address: %s, network_view: %s", + self.address, + network_view, + ) + return super().delete() + + +class InfobloxNamespace(Namespace): + """Infoblox implementation of the Namespace model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Don't allow creating Network Views in Infoblox.""" + diffsync.job.logger.error( + f"Creating Network Views in Infoblox is not allowed. Nautobot Namespace: {ids['name']}" + ) + raise NotImplementedError + + def update(self, attrs): + """Don't allow updating Network Views in Infoblox.""" + self.diffsync.job.logger.error( + f"Updating Network Views in Infoblox is not allowed. Nautobot Namespace: {self.get_identifiers()['name']}" + ) + raise NotImplementedError + + def delete(self): + """Don't allow deleting Network Views in Infoblox.""" + self.diffsync.job.logger.error( + f"Deleting Network Views in Infoblox is not allowed. 
Nautobot Namespace: {self.get_identifiers()['name']}" + ) + raise NotImplementedError + + +class InfobloxDnsARecord(DnsARecord): + """Infoblox implementation of the DnsARecord Model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create DNS A record in Infoblox.""" # DNS record not needed, we can return - if diffsync.config.dns_record_type == DNSRecordTypeChoices.DONT_CREATE_RECORD: + if diffsync.config.dns_record_type not in ( + DNSRecordTypeChoices.A_RECORD, + DNSRecordTypeChoices.A_AND_PTR_RECORD, + ): return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + network_view = map_network_view_to_namespace(value=ids["namespace"], direction="ns_to_nv") + ip_address = ids["address"] dns_name = attrs.get("dns_name") dns_comment = attrs.get("description") if not dns_name: diffsync.job.logger.warning( - f"Cannot create Infoblox DNS record for IP Address {ip_address}. DNS name is not defined." + f"Cannot create Infoblox DNS A record for IP Address {ip_address}. DNS name is not defined." ) return super().create(ids=ids, diffsync=diffsync, attrs=attrs) @@ -142,369 +305,245 @@ def create(cls, diffsync, ids, attrs): # pylint: disable=too-many-branches diffsync.job.logger.warning(f"Invalid zone fqdn in DNS name `{dns_name}` for IP Address {ip_address}.") return super().create(ids=ids, diffsync=diffsync, attrs=attrs) - if diffsync.config.dns_record_type == DNSRecordTypeChoices.A_RECORD: - diffsync.conn.create_a_record(dns_name, ip_address, dns_comment, network_view=network_view) - if diffsync.job.debug: - diffsync.job.logger.debug( - "Created DNS A record, address: %s, dns_name: %s, network_view: %s, comment: %s", - ip_address, - dns_name, - network_view, - dns_comment, - ) - elif diffsync.config.dns_record_type == DNSRecordTypeChoices.A_AND_PTR_RECORD: - diffsync.conn.create_a_record(dns_name, ip_address, dns_comment, network_view=network_view) - if diffsync.job.debug: - diffsync.job.logger.debug( - "Created DNS A record, address: %s, dns_name: %s, network_view: %s, comment: %s", - ip_address, - dns_name, - network_view, - dns_comment, - ) - diffsync.conn.create_ptr_record(dns_name, ip_address, dns_comment, network_view=network_view) - if diffsync.job.debug: - diffsync.job.logger.debug( - "Created DNS PTR record, address: %s, dns_name: %s, network_view: %s, comment: %s", - ip_address, - dns_name, - network_view, - dns_comment, - ) - elif diffsync.config.dns_record_type == DNSRecordTypeChoices.HOST_RECORD: - diffsync.conn.create_host_record(dns_name, ip_address, dns_comment, network_view=network_view) - if diffsync.job.debug: - diffsync.job.logger.debug( - "Created DNS Host record, address: %s, dns_name: %s, network_view: %s, comment: %s", - ip_address, - dns_name, - network_view, - dns_comment, - ) - return super().create(ids=ids, diffsync=diffsync, attrs=attrs) - - def _ip_update_check_for_incompatible_record_types(self, attrs: dict, inf_attrs: dict, ip_address: str): - """Checks whether requested changes to the DNS records are compatible with existing Infoblox DNS objects. 
+ diffsync.conn.create_a_record(dns_name, ip_address, dns_comment, network_view=network_view) + if diffsync.job.debug: + diffsync.job.logger.debug( + "Created DNS A record, address: %s, dns_name: %s, network_view: %s, comment: %s", + ip_address, + dns_name, + network_view, + dns_comment, + ) - Args: - attrs: Changed Nautobot object attributes - inf_attrs: Infoblox objects attributes - ip_address: IP address of the record + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) - Returns: - tuple (bool, str) - """ - # Infoblox Host record acts as a combined A/PTR record. - # Only allow creating/updating A and PTR record if IP Address doesn't have a corresponding Host record. - # Only allows creating/updating Host record if IP Address doesn't have a corresponding A or PTR record. - incompatible_record_types = False - incomp_msg = "" - if ( - attrs.get("has_a_record", False) - and self.diffsync.config.dns_record_type == DNSRecordTypeChoices.A_RECORD - and inf_attrs["has_host_record"] - ): - incomp_msg = f"Cannot update A Record for IP Address, {ip_address}. It already has an existing Host Record." - incompatible_record_types = True - elif ( - attrs.get("has_ptr_record", False) - and self.diffsync.config.dns_record_type == DNSRecordTypeChoices.A_AND_PTR_RECORD - and inf_attrs["has_host_record"] - ): - incomp_msg = ( - f"Cannot create/update PTR Record for IP Address, {ip_address}. It already has an existing Host Record." - ) - incompatible_record_types = True - elif ( - attrs.get("has_host_record", False) - and self.diffsync.config.dns_record_type == DNSRecordTypeChoices.HOST_RECORD - and inf_attrs["has_a_record"] - ): - incomp_msg = f"Cannot update Host Record for IP Address, {ip_address}. It already has an existing A Record." - incompatible_record_types = True - elif ( - attrs.get("has_host_record", False) - and self.diffsync.config.dns_record_type == DNSRecordTypeChoices.HOST_RECORD - and inf_attrs["has_ptr_record"] + def update(self, attrs): + """Update DNS A record in Infoblox.""" + # DNS record not needed, we can return + if self.diffsync.config.dns_record_type not in ( + DNSRecordTypeChoices.A_RECORD, + DNSRecordTypeChoices.A_AND_PTR_RECORD, ): - incomp_msg = ( - f"Cannot update Host Record for IP Address, {ip_address}. It already has an existing PTR Record." - ) - incompatible_record_types = True + return super().update(attrs) - return incompatible_record_types, incomp_msg + network_view = map_network_view_to_namespace(value=self.namespace, direction="ns_to_nv") + dns_payload = {} + dns_comment = attrs.get("description") + if dns_comment: + dns_payload["comment"] = dns_comment + if attrs.get("dns_name"): + # Nautobot side doesn't check if dns name is a FQDN. Additionally, Infoblox won't accept DNS name if the corresponding zone FQDN doesn't exist. + if not validate_dns_name(self.diffsync.conn, attrs.get("dns_name"), network_view): + self.diffsync.job.logger.warning( + f"Invalid zone fqdn in DNS name `{attrs.get('dns_name')}` for IP Address {self.address}." + ) + return super().update(attrs) - def _ip_update_update_fixed_address(self, new_attrs: dict, ip_address: str, network_view: str) -> None: - """Updates fixed address record in Infoblox. Triggered by IP Address update. 
+ dns_payload["name"] = attrs.get("dns_name") - Args: - new_attrs: Object attributes changed in Nautobot - ip_address: IP address of the fixed address - network_view: Network View of the fixed address - """ - mac_address = new_attrs.get("mac_address") - - fa_update_data = {} - if "fixed_address_name" in new_attrs: - fa_update_data["name"] = new_attrs.get("fixed_address_name") or "" - if "fixed_address_comment" in new_attrs: - fa_update_data["comment"] = new_attrs.get("fixed_address_comment") or "" - - if ( - self.diffsync.config.fixed_address_type == FixedAddressTypeChoices.RESERVED - and self.fixed_address_type == "RESERVED" - and fa_update_data - ): - self.diffsync.conn.update_fixed_address(ref=self.fixed_address_ref, data=fa_update_data) + if dns_payload: + self.diffsync.conn.update_a_record(ref=self.ref, data=dns_payload) if self.diffsync.job.debug: self.diffsync.job.logger.debug( - "Updated fixed address reservation, address: %s, network_view: %s, update data: %s", - ip_address, + "Updated A record, address: %s, network_view: %s, update data: %s", + self.address, network_view, - fa_update_data, - extra={"grouping": "update"}, + dns_payload, ) - elif ( - self.diffsync.config.fixed_address_type == FixedAddressTypeChoices.MAC_ADDRESS - and self.fixed_address_type == "MAC_ADDRESS" - and (fa_update_data or mac_address) + + return super().update(attrs) + + def delete(self): + """Delete A Record in Infoblox.""" + if InfobloxDeletableModelChoices.DNS_A_RECORD not in self.diffsync.config.infoblox_deletable_models: + return super().delete() + + if self.diffsync.config.dns_record_type not in ( + DNSRecordTypeChoices.A_RECORD, + DNSRecordTypeChoices.A_AND_PTR_RECORD, ): - if mac_address: - fa_update_data["mac"] = mac_address - self.diffsync.conn.update_fixed_address(ref=self.fixed_address_ref, data=fa_update_data) - if self.diffsync.job.debug: - self.diffsync.job.logger.debug( - "Updated fixed address with MAC, address: %s, network_view: %s, update data: %s", - ip_address, - network_view, - fa_update_data, - extra={"grouping": "update"}, - ) + return super().delete() + + network_view = map_network_view_to_namespace(value=self.namespace, direction="ns_to_nv") + self.diffsync.conn.delete_a_record_by_ref(self.ref) + self.diffsync.job.logger.info( + "Deleted A record in Infoblox, address: %s, network_view: %s", + self.address, + network_view, + ) + return super().delete() - def _ip_update_create_fixed_address(self, new_attrs: dict, ip_address: str, network_view: str) -> None: - """Creates fixed address record in Infoblox. Triggered by IP Address update. 
- Args: - new_attrs: Object attributes changed in Nautobot - ip_address: IP address of the fixed address - network_view: Network View of the fixed address - """ - mac_address = new_attrs.get("mac_address") - fixed_address_name = new_attrs.get("fixed_address_name") or "" - fixed_address_comment = new_attrs.get("fixed_address_comment") or "" +class InfobloxDnsHostRecord(DnsHostRecord): + """Infoblox implementation of the DnsHostRecord Model.""" - if self.diffsync.config.fixed_address_type == FixedAddressTypeChoices.RESERVED: - self.diffsync.conn.create_fixed_address( - ip_address=ip_address, - name=fixed_address_name, - comment=fixed_address_comment, - match_client="RESERVED", - network_view=network_view, + @classmethod + def create(cls, diffsync, ids, attrs): + """Create DNS Host record in Infoblox.""" + # DNS record not needed, we can return + if diffsync.config.dns_record_type != DNSRecordTypeChoices.HOST_RECORD: + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + + network_view = map_network_view_to_namespace(value=ids["namespace"], direction="ns_to_nv") + ip_address = ids["address"] + dns_name = attrs.get("dns_name") + dns_comment = attrs.get("description") + if not dns_name: + diffsync.job.logger.warning( + f"Cannot create Infoblox DNS Host record for IP Address {ip_address}. DNS name is not defined." ) - if self.diffsync.job.debug: - self.diffsync.job.logger.debug( - "Created fixed address reservation, address: %s, name: %s, network_view: %s, comment: %s", - ip_address, - fixed_address_name, - network_view, - fixed_address_comment, - extra={"grouping": "update"}, - ) - elif self.diffsync.config.fixed_address_type == FixedAddressTypeChoices.MAC_ADDRESS and mac_address: - self.diffsync.conn.create_fixed_address( - ip_address=ip_address, - name=fixed_address_name, - mac_address=mac_address, - comment=fixed_address_comment, - match_client="MAC_ADDRESS", - network_view=network_view, + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + + # Nautobot side doesn't check if dns name is a FQDN. Additionally, Infoblox won't accept DNS name if the corresponding zone FQDN doesn't exist. + if not validate_dns_name(diffsync.conn, dns_name, network_view): + diffsync.job.logger.warning(f"Invalid zone fqdn in DNS name `{dns_name}` for IP Address {ip_address}.") + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + + diffsync.conn.create_host_record(dns_name, ip_address, dns_comment, network_view=network_view) + if diffsync.job.debug: + diffsync.job.logger.debug( + "Created DNS Host record, address: %s, dns_name: %s, network_view: %s, comment: %s", + ip_address, + dns_name, + network_view, + dns_comment, ) - if self.diffsync.job.debug: - self.diffsync.job.logger.debug( - "Created fixed address with MAC, address: %s, name: %s, mac address: %s, network_view: %s, comment: %s", - ip_address, - fixed_address_name, - mac_address, - network_view, - fixed_address_comment, - extra={"grouping": "update"}, - ) - def _ip_update_create_or_update_dns_records( # pylint: disable=too-many-arguments,too-many-branches - self, new_attrs: dict, inf_attrs: dict, canonical_dns_name: str, ip_address: str, network_view: str - ) -> None: - """Creates or update DNS records connected to the IP address. Triggered by IP Address update. 
- - Args: - new_attrs: Object attributes changed in Nautobot - inf_attrs: Infoblox object attributes - canonical_dns_name: DNS name used for create operations only - ip_address: IP address for which DNS records are created - network_view: Network View of the fixed address - """ + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + + def update(self, attrs): + """Update DNS Host record in Infoblox.""" + # DNS record not needed, we can return + if self.diffsync.config.dns_record_type != DNSRecordTypeChoices.HOST_RECORD: + return super().update(attrs) + + network_view = map_network_view_to_namespace(value=self.namespace, direction="ns_to_nv") dns_payload = {} - ptr_payload = {} - dns_comment = new_attrs.get("description") + dns_comment = attrs.get("description") if dns_comment: dns_payload["comment"] = dns_comment - ptr_payload["comment"] = dns_comment - if new_attrs.get("dns_name"): - dns_payload["name"] = new_attrs.get("dns_name") - ptr_payload["ptrdname"] = new_attrs.get("dns_name") - - a_record_action = ptr_record_action = host_record_action = "none" - if self.diffsync.config.dns_record_type == DNSRecordTypeChoices.A_RECORD: - a_record_action = "update" if inf_attrs["has_a_record"] else "create" - elif self.diffsync.config.dns_record_type == DNSRecordTypeChoices.A_AND_PTR_RECORD: - a_record_action = "update" if inf_attrs["has_a_record"] else "create" - ptr_record_action = "update" if inf_attrs["has_ptr_record"] else "create" - elif self.diffsync.config.dns_record_type == DNSRecordTypeChoices.HOST_RECORD: - host_record_action = "update" if inf_attrs["has_host_record"] else "create" - - # IP Address in Infoblox is not a plain IP Address like in Nautobot. - # In Infoblox we can have one of many types of Fixed Address, Host record for IP Address, or A Record, with optional PTR, for IP Address. - # When syncing from Nautobot to Infoblox we take IP Address and check if it has dns_name field populated. - # We then combine this with the Infoblox Config toggles to arrive at the desired state in Infoblox. - comment = dns_comment or inf_attrs.get("description") - if host_record_action == "update" and dns_payload: - self.diffsync.conn.update_host_record(ref=self.host_record_ref, data=dns_payload) - if self.diffsync.job.debug: - self.diffsync.job.logger.debug( - "Updated Host record, address: %s, network_view: %s, update data: %s", - ip_address, - network_view, - dns_payload, - extra={"grouping": "update"}, + if attrs.get("dns_name"): + # Nautobot side doesn't check if dns name is a FQDN. Additionally, Infoblox won't accept DNS name if the corresponding zone FQDN doesn't exist. + if not validate_dns_name(self.diffsync.conn, attrs.get("dns_name"), network_view): + self.diffsync.job.logger.warning( + f"Invalid zone fqdn in DNS name `{attrs.get('dns_name')}` for IP Address {self.address}." 
) - elif host_record_action == "create": - self.diffsync.conn.create_host_record(canonical_dns_name, ip_address, comment, network_view=network_view) - if self.diffsync.job.debug: - self.diffsync.job.logger.debug( - "Created Host record, address: %s, network_view: %s, DNS name: %s, comment: %s", - ip_address, - network_view, - canonical_dns_name, - comment, - extra={"grouping": "update"}, - ) - if a_record_action == "update" and dns_payload: - self.diffsync.conn.update_a_record(ref=self.a_record_ref, data=dns_payload) + return super().update(attrs) + + dns_payload["name"] = attrs.get("dns_name") + + if dns_payload: + self.diffsync.conn.update_host_record(ref=self.ref, data=dns_payload) if self.diffsync.job.debug: self.diffsync.job.logger.debug( - "Updated A record, address: %s, network_view: %s, update data: %s", - ip_address, + "Updated Host record, address: %s, network_view: %s, update data: %s", + self.address, network_view, dns_payload, - extra={"grouping": "update"}, - ) - elif a_record_action == "create": - self.diffsync.conn.create_a_record(canonical_dns_name, ip_address, comment, network_view=network_view) - if self.diffsync.job.debug: - self.diffsync.job.logger.debug( - "Created A record, address: %s, network_view: %s, DNS name: %s, comment: %s", - ip_address, - network_view, - canonical_dns_name, - comment, - extra={"grouping": "update"}, - ) - if ptr_record_action == "update" and ptr_payload: - self.diffsync.conn.update_ptr_record(ref=self.ptr_record_ref, data=ptr_payload) - if self.diffsync.job.debug: - self.diffsync.job.logger.debug( - "Updated PTR record, address: %s, network_view: %s, update data: %s", - ip_address, - network_view, - ptr_payload, - extra={"grouping": "update"}, - ) - elif ptr_record_action == "create": - self.diffsync.conn.create_ptr_record(canonical_dns_name, ip_address, comment, network_view=network_view) - if self.diffsync.job.debug: - self.diffsync.job.logger.debug( - "Created PTR record, address: %s, network_view: %s, DNS name: %s, comment: %s", - ip_address, - network_view, - canonical_dns_name, - comment, - extra={"grouping": "update"}, ) - def update(self, attrs): - """Update IP Address object in Infoblox.""" - ids = self.get_identifiers() - inf_attrs = self.get_attrs() - ip_address = ids["address"] - network_view = map_network_view_to_namespace(value=ids["namespace"], direction="ns_to_nv") + return super().update(attrs) - # Attempt update of a fixed address if Infoblox has one already - if inf_attrs.get("has_fixed_address"): - self._ip_update_update_fixed_address(new_attrs=attrs, ip_address=ip_address, network_view=network_view) - # IP Address exists in Infoblox without Fixed Address object. Nautobot side is asking for Fixed Address so we need to create one. 
- elif ( - attrs.get("has_fixed_address") - and self.diffsync.config.fixed_address_type != FixedAddressTypeChoices.DONT_CREATE_RECORD - ): - self._ip_update_create_fixed_address(new_attrs=attrs, ip_address=ip_address, network_view=network_view) + def delete(self): + """Delete DNS Host record in Infoblox.""" + if InfobloxDeletableModelChoices.DNS_HOST_RECORD not in self.diffsync.config.infoblox_deletable_models: + return super().delete() + + if self.diffsync.config.dns_record_type != DNSRecordTypeChoices.HOST_RECORD: + return super().delete() + + network_view = map_network_view_to_namespace(value=self.namespace, direction="ns_to_nv") + self.diffsync.conn.delete_host_record_by_ref(self.ref) + self.diffsync.job.logger.info( + "Deleted Host record in Infoblox, address: %s, network_view: %s", + self.address, + network_view, + ) + return super().delete() + + +class InfobloxDnsPTRRecord(DnsPTRRecord): + """Infoblox implementation of the DnsPTRRecord Model.""" + @classmethod + def create(cls, diffsync, ids, attrs): + """Create PTR record in Infoblox.""" # DNS record not needed, we can return - if self.diffsync.config.dns_record_type == DNSRecordTypeChoices.DONT_CREATE_RECORD: - return super().update(attrs) + if diffsync.config.dns_record_type != DNSRecordTypeChoices.A_AND_PTR_RECORD: + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) - # Nautobot side doesn't check if dns name is a fqdn. Additionally, Infoblox won't allow dns name if the zone fqdn doesn't exist. - # We get either existing DNS name, or a new one. This is because name might be the same but we might need to create a new DNS record. - canonical_dns_name = attrs.get("dns_name", inf_attrs["dns_name"]) - if not canonical_dns_name: - self.diffsync.job.logger.info( - f"Skipping DNS Infoblox record create/update for IP Address {ip_address}. DNS name is not defined." + network_view = map_network_view_to_namespace(value=ids["namespace"], direction="ns_to_nv") + ip_address = ids["address"] + dns_name = attrs.get("dns_name") + dns_comment = attrs.get("description") + if not dns_name: + diffsync.job.logger.warning( + f"Cannot create Infoblox PTR DNS record for IP Address {ip_address}. DNS name is not defined." ) - return super().update(attrs) + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) - if not validate_dns_name(self.diffsync.conn, canonical_dns_name, network_view): - self.diffsync.job.logger.warning( - f"Invalid zone fqdn in DNS name `{canonical_dns_name}` for IP Address {ip_address}" - ) - return super().update(attrs) + # Nautobot side doesn't check if dns name is a FQDN. Additionally, Infoblox won't accept DNS name if the corresponding zone FQDN doesn't exist. 
+ if not validate_dns_name(diffsync.conn, dns_name, network_view): + diffsync.job.logger.warning(f"Invalid zone fqdn in DNS name `{dns_name}` for IP Address {ip_address}.") + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) - incompatible_record_types, incomp_msg = self._ip_update_check_for_incompatible_record_types( - attrs=attrs, inf_attrs=inf_attrs, ip_address=ip_address - ) - if incompatible_record_types: - self.diffsync.job.logger.warning(incomp_msg) - return super().update(attrs) + diffsync.conn.create_ptr_record(dns_name, ip_address, dns_comment, network_view=network_view) + if diffsync.job.debug: + diffsync.job.logger.debug( + "Created DNS PTR record, address: %s, dns_name: %s, network_view: %s, comment: %s", + ip_address, + dns_name, + network_view, + dns_comment, + ) - self._ip_update_create_or_update_dns_records( - new_attrs=attrs, - inf_attrs=inf_attrs, - canonical_dns_name=canonical_dns_name, - ip_address=ip_address, - network_view=network_view, - ) + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) - return super().update(attrs) + def update(self, attrs): + """Update PTR record in Infoblox.""" + if not self.diffsync.config.dns_record_type == DNSRecordTypeChoices.A_AND_PTR_RECORD: + return super().update(attrs) + network_view = map_network_view_to_namespace(value=self.namespace, direction="ns_to_nv") + dns_payload = {} + dns_comment = attrs.get("description") + if dns_comment: + dns_payload["comment"] = dns_comment + if attrs.get("dns_name"): + # Nautobot side doesn't check if dns name is a FQDN. Additionally, Infoblox won't accept DNS name if the corresponding zone FQDN doesn't exist. + if not validate_dns_name(self.diffsync.conn, attrs.get("dns_name"), network_view): + self.diffsync.job.logger.warning( + f"Invalid zone fqdn in DNS name `{attrs.get('dns_name')}` for IP Address {self.address}." + ) + return super().update(attrs) -class InfobloxNamespace(Namespace): - """Infoblox implementation of the Namespace model.""" + dns_payload["ptrdname"] = attrs.get("dns_name") - @classmethod - def create(cls, diffsync, ids, attrs): - """Don't allow creating Network Views in Infoblox.""" - diffsync.job.logger.error( - f"Creating Network Views in Infoblox is not allowed. Nautobot Namespace: {ids['name']}" - ) - raise NotImplementedError + if dns_payload: + self.diffsync.conn.update_ptr_record(ref=self.ref, data=dns_payload) + if self.diffsync.job.debug: + self.diffsync.job.logger.debug( + "Updated PTR record, address: %s, network_view: %s, update data: %s", + self.address, + network_view, + dns_payload, + ) - def update(self, attrs): - """Don't allow updating Network Views in Infoblox.""" - self.diffsync.job.logger.error( - f"Updating Network Views in Infoblox is not allowed. Nautobot Namespace: {self.get_identifiers()['name']}" - ) - raise NotImplementedError + return super().update(attrs) def delete(self): - """Don't allow deleting Network Views in Infoblox.""" - self.diffsync.job.logger.error( - f"Deleting Network Views in Infoblox is not allowed. 
Nautobot Namespace: {self.get_identifiers()['name']}"
+        """Delete PTR Record in Infoblox."""
+        if InfobloxDeletableModelChoices.DNS_PTR_RECORD not in self.diffsync.config.infoblox_deletable_models:
+            return super().delete()
+
+        if not self.diffsync.config.dns_record_type == DNSRecordTypeChoices.A_AND_PTR_RECORD:
+            return super().delete()
+
+        network_view = map_network_view_to_namespace(value=self.namespace, direction="ns_to_nv")
+        self.diffsync.conn.delete_ptr_record_by_ref(self.ref)
+        self.diffsync.job.logger.info(
+            "Deleted PTR record in Infoblox, address: %s, network_view: %s",
+            self.address,
+            network_view,
         )
-        raise NotImplementedError
+        return super().delete()

From 00f11cd7b4bac0b759bb74740887af4b91f33a1a Mon Sep 17 00:00:00 2001
From: Przemek Rogala
Date: Thu, 13 Jun 2024 19:13:04 +0100
Subject: [PATCH 150/229] Add A, PTR and Host models. Allow conditional deleting of some models.

---
 .../infoblox/diffsync/models/nautobot.py      | 460 ++++++++++++++++--
 1 file changed, 416 insertions(+), 44 deletions(-)

diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py b/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py
index 4380fa320..07145bb20 100644
--- a/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py
+++ b/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py
@@ -13,7 +13,21 @@
 from nautobot.ipam.models import Prefix as OrmPrefix
 from nautobot.ipam.models import VLANGroup as OrmVlanGroup
 
-from nautobot_ssot.integrations.infoblox.diffsync.models.base import IPAddress, Namespace, Network, Vlan, VlanView
+from nautobot_ssot.integrations.infoblox.choices import (
+    FixedAddressTypeChoices,
+    DNSRecordTypeChoices,
+    NautobotDeletableModelChoices,
+)
+from nautobot_ssot.integrations.infoblox.diffsync.models.base import (
+    DnsARecord,
+    DnsHostRecord,
+    DnsPTRRecord,
+    IPAddress,
+    Namespace,
+    Network,
+    Vlan,
+    VlanView,
+)
 from nautobot_ssot.integrations.infoblox.utils.diffsync import (
     create_tag_sync_from_infoblox,
     map_network_view_to_namespace,
@@ -99,6 +113,60 @@ def process_ext_attrs(diffsync, obj: object, extattrs: dict):  # pylint: disable
                 obj.custom_field_data.update({_cf_dict["key"]: str(attr_value)})
 
 
+def _create_ip_address_common(diffsync: object, ids: dict, attrs: dict) -> IPAddress:
+    """Create common IP Address attributes.
+
+    Args:
+        diffsync (object): diffsync adapter instance
+        ids (dict): IP Address identifiers
+        attrs (dict): IP Address attributes
+
+    Returns:
+        Partially instantiated IPAddress object
+    """
+    try:
+        status = diffsync.status_map[attrs["status"]]
+    except KeyError:
+        status = diffsync.config.default_status.pk
+    addr = f"{ids['address']}/{ids['prefix_length']}"
+    if attrs.get("ip_addr_type"):
+        if attrs["ip_addr_type"].lower() in IPAddressTypeChoices.as_dict():
+            ip_addr_type = attrs["ip_addr_type"].lower()
+        else:
+            diffsync.job.logger.warning(
+                f"unable to determine IPAddress Type for {addr}, defaulting to 'Host'",
+                extra={"grouping": "create"},
+            )
+            ip_addr_type = "host"
+    else:
+        ip_addr_type = "host"
+    _ip = OrmIPAddress(
+        address=addr,
+        status_id=status,
+        type=ip_addr_type,
+        parent_id=diffsync.prefix_map[(ids["namespace"], ids["prefix"])],
+    )
+    if attrs.get("ext_attrs"):
+        process_ext_attrs(diffsync=diffsync, obj=_ip, extattrs=attrs["ext_attrs"])
+    _ip.tags.add(create_tag_sync_from_infoblox())
+
+    return _ip
+
+
+def _get_ip_address_ds_key(address: object) -> tuple:
+    """Get the key used to look up the PK of an IP Address object in the adapter map.
+ + Args: + address (object): Diffsync IPAddress object + + Returns: + tuple containing key to the dict + """ + ip_address_key = (f"{address.address}/{address.prefix_length}", address.namespace) + + return ip_address_key + + class NautobotNetwork(Network): """Nautobot implementation of the Network Model.""" @@ -205,54 +273,58 @@ class NautobotIPAddress(IPAddress): @classmethod def create(cls, diffsync, ids, attrs): - """Create IPAddress object in Nautobot.""" - try: - status = diffsync.status_map[attrs["status"]] - except KeyError: - status = diffsync.config.default_status.pk - addr = f"{ids['address']}/{ids['prefix_length']}" - if attrs.get("ip_addr_type"): - if attrs["ip_addr_type"].lower() in IPAddressTypeChoices.as_dict(): - ip_addr_type = attrs["ip_addr_type"].lower() - else: - diffsync.logger.warning(f"unable to determine IPAddress Type for {addr}, defaulting to 'Host'") - ip_addr_type = "host" - else: - ip_addr_type = "host" + """Create IPAddress object in Nautobot. Used for fixed address data only.""" + # Infoblox side doesn't have a fixed address record + if not attrs.get("has_fixed_address", False): + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + + addr_w_pfxl = f"{ids['address']}/{ids['prefix_length']}" + if diffsync.config.fixed_address_type == FixedAddressTypeChoices.DONT_CREATE_RECORD: + diffsync.job.logger.warning( + f"Did not create Fixed Address {addr_w_pfxl}-{ids['namespace']}. It exists in Infoblox but Nautobot config has `fixed_address_type` set to `DONT_CREATE_RECORD`." + ) + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + if diffsync.job.debug: - diffsync.job.logger.debug(f"Creating IP Address {addr}") - _ip = OrmIPAddress( - address=addr, - status_id=status, - type=ip_addr_type, - dns_name=attrs.get("dns_name", ""), - parent_id=diffsync.prefix_map[(ids["namespace"], ids["prefix"])], - ) - if attrs.get("ext_attrs"): - process_ext_attrs(diffsync=diffsync, obj=_ip, extattrs=attrs["ext_attrs"]) + diffsync.job.logger.debug(f"Creating IP Address {addr_w_pfxl}") + _ip = _create_ip_address_common(diffsync, ids, attrs) + _ip.description = attrs.get("description") or "" if "mac_address" in attrs: _ip.custom_field_data.update({"mac_address": attrs.get("mac_address", "")}) - if attrs.get("has_fixed_address", False) and "fixed_address_comment" in attrs: + if "fixed_address_comment" in attrs: _ip.custom_field_data.update({"fixed_address_comment": attrs.get("fixed_address_comment") or ""}) - # Fixed address name takes precedence over DNS comment field, and is recorded in the description field of Nautobot IP Address. - if attrs.get("has_fixed_address", False) and "fixed_address_name" in attrs: - _ip.description = attrs.get("fixed_address_name") or "" - else: - _ip.description = attrs.get("description") or "" + try: - _ip.tags.add(create_tag_sync_from_infoblox()) _ip.validated_save() - diffsync.ipaddr_map[(_ip.address, ids["namespace"])] = _ip.id - diffsync.ipaddr_map[_ip.address] = _ip.id + diffsync.ipaddr_map[(f"{addr_w_pfxl}", ids["namespace"])] = _ip.id return super().create(ids=ids, diffsync=diffsync, attrs=attrs) except ValidationError as err: - diffsync.job.logger.warning( - f"Error with validating IP Address {ids['address']}/{ids['prefix_length']}-{ids['namespace']}. {err}" - ) + diffsync.job.logger.warning(f"Error with validating IP Address {addr_w_pfxl}-{ids['namespace']}. 
{err}") return None - def update(self, attrs): + def update(self, attrs): # pylint: disable=too-many-branches """Update IPAddress object in Nautobot.""" + # Description field should only be used by Fixed Address. + # If description is cleared in Infoblox diffsync record it either means fixed address is gone or name was removed. + # Either way we clear the field in Nautobot even if DONT_CREATE_RECORD is set. + if attrs.get("description") == "" and FixedAddressTypeChoices.DONT_CREATE_RECORD: + _ipaddr = OrmIPAddress.objects.get(id=self.pk) + _ipaddr.description = attrs["description"] + _ipaddr.custom_field_data.update({"fixed_address_comment": attrs.get("fixed_address_comment") or ""}) + try: + _ipaddr.validated_save() + return super().update(attrs) + except ValidationError as err: + self.diffsync.job.logger.warning(f"Error with updating IP Address {self.address}. {err}") + return None + + if self.diffsync.config.fixed_address_type == FixedAddressTypeChoices.DONT_CREATE_RECORD: + self.diffsync.job.logger.warning( + f"Did not update Fixed Address {self.address}/{self.prefix_length}-{self.namespace}. " # nosec: B608 + "It exists in Infoblox but Nautobot config has `fixed_address_type` set to `DONT_CREATE_RECORD`." + ) + return super().update(attrs) + _ipaddr = OrmIPAddress.objects.get(id=self.pk) if attrs.get("status"): try: @@ -265,13 +337,8 @@ def update(self, attrs): _ipaddr.type = attrs["ip_addr_type"].lower() else: _ipaddr.type = "host" - # Fixed Address name takes precedence when filling out `description` field - if attrs.get("fixed_address_name"): - _ipaddr.description = attrs.get("fixed_address_name") or "" - elif attrs.get("description"): + if attrs.get("description"): _ipaddr.description = attrs["description"] - if attrs.get("dns_name"): - _ipaddr.dns_name = attrs["dns_name"] if "ext_attrs" in attrs: process_ext_attrs(diffsync=self.diffsync, obj=_ipaddr, extattrs=attrs["ext_attrs"]) if "mac_address" in attrs: @@ -285,6 +352,16 @@ def update(self, attrs): self.diffsync.job.logger.warning(f"Error with updating IP Address {self.address}. {err}") return None + def delete(self): + """Delete IPAddress object in Nautobot.""" + if NautobotDeletableModelChoices.IP_ADDRESS not in self.diffsync.config.nautobot_deletable_models: + return super().delete() + + _ipaddr = OrmIPAddress.objects.get(id=self.pk) + del self.diffsync.ipaddr_map[_get_ip_address_ds_key(self)] + _ipaddr.delete() + return super().delete() + class NautobotVlanGroup(VlanView): """Nautobot implementation of the VLANView model.""" @@ -311,6 +388,9 @@ def update(self, attrs): def delete(self): """Delete VLANGroup object in Nautobot.""" + if NautobotDeletableModelChoices.VLAN_GROUP not in self.diffsync.config.nautobot_deletable_models: + return super().delete() + self.diffsync.job.logger.warning(f"VLAN Group {self.name} will be deleted.") _vg = OrmVlanGroup.objects.get(id=self.pk) _vg.delete() @@ -374,6 +454,9 @@ def update(self, attrs): def delete(self): """Delete VLAN object in Nautobot.""" + if NautobotDeletableModelChoices.VLAN not in self.diffsync.config.nautobot_deletable_models: + return super().delete() + self.diffsync.job.logger.warning(f"VLAN {self.vid} will be deleted.") _vlan = OrmVlan.objects.get(id=self.pk) _vlan.delete() @@ -418,3 +501,292 @@ def delete(self): f"Deleting Namespaces in Nautobot is not allowed. 
Infoblox Network View: {self.get_identifiers()['name']}" ) raise NotImplementedError + + +class NautobotDnsARecord(DnsARecord): + """Nautobot implementation of the DnsARecord Model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create A Record data on IPAddress object in Nautobot.""" + addr_w_pfxl = f"{ids['address']}/{ids['prefix_length']}" + + if diffsync.config.dns_record_type not in ( + DNSRecordTypeChoices.A_RECORD, + DNSRecordTypeChoices.A_AND_PTR_RECORD, + ): + diffsync.job.logger.warning( + f"Can't create/update A record data for IP Address: {addr_w_pfxl}-{ids['namespace']}. Nautobot config is not set for A record operations." # nosec: B608 + ) + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + + ip_pk = diffsync.ipaddr_map.get((addr_w_pfxl, ids["namespace"])) + if ip_pk: + if diffsync.job.debug: + diffsync.job.logger.debug( + f"Adding A record data to an existing IP Address: {addr_w_pfxl}-{ids['namespace']}." + ) + _ipaddr = OrmIPAddress.objects.get(id=ip_pk) + _ipaddr.dns_name = attrs.get("dns_name") or "" + _ipaddr.custom_field_data.update({"dns_a_record_comment": attrs.get("description") or ""}) + try: + _ipaddr.validated_save() + except ValidationError as err: + diffsync.job.logger.warning( + f"Error with updating A record data for IP Address: {addr_w_pfxl}-{ids['namespace']}. {err}" + ) + return None + else: + if diffsync.job.debug: + diffsync.job.logger.debug(f"Creating IP Address from A record data: {addr_w_pfxl}-{ids['namespace']}.") + try: + _ipaddr = _create_ip_address_common(diffsync, ids, attrs) + _ipaddr.dns_name = attrs.get("dns_name") or "" + _ipaddr.custom_field_data.update({"dns_a_record_comment": attrs.get("description") or ""}) + _ipaddr.validated_save() + diffsync.ipaddr_map[(addr_w_pfxl, ids["namespace"])] = _ipaddr.id + except ValidationError as err: + diffsync.job.logger.warning( + f"Error with creating IP Address from A record data: {addr_w_pfxl}-{ids['namespace']}. {err}" + ) + return None + + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + + def update(self, attrs): + """Update A Record data on IPAddress object in Nautobot.""" + if self.diffsync.config.dns_record_type not in ( + DNSRecordTypeChoices.A_RECORD, + DNSRecordTypeChoices.A_AND_PTR_RECORD, + ): + self.diffsync.job.logger.warning( + f"Can't update A record data for IP Address: {self.address}/{self.prefix_length}-{self.namespace}. Nautobot config is not set for A record operations." # nosec: B608 + ) + return super().update(attrs) + + _ipaddr = OrmIPAddress.objects.get(id=self.pk) + if attrs.get("dns_name"): + _ipaddr.dns_name = attrs["dns_name"] + if "description" in attrs: + _ipaddr.custom_field_data.update({"dns_a_record_comment": attrs.get("description") or ""}) + try: + _ipaddr.validated_save() + return super().update(attrs) + except ValidationError as err: + self.diffsync.job.logger.warning( + f"Error with updating A record data for IP Address: {self.address}/{self.prefix_length}-{self.namespace}. {err}" + ) + return None + + def delete(self): + """Delete A Record data on IPAddress object in Nautobot.""" + if NautobotDeletableModelChoices.DNS_A_RECORD not in self.diffsync.config.nautobot_deletable_models: + return super().delete() + + if self.diffsync.config.dns_record_type not in ( + DNSRecordTypeChoices.A_RECORD, + DNSRecordTypeChoices.A_AND_PTR_RECORD, + ): + self.diffsync.job.logger.warning( + f"Can't delete A record data for IP Address: {self.address}/{self.prefix_length}-{self.namespace}. 
Nautobot config is not set for A record operations." + ) + return super().delete() + + # Parent record has been already deleted + if _get_ip_address_ds_key(self) not in self.diffsync.ipaddr_map: + return super().delete() + + _ipaddr = OrmIPAddress.objects.get(id=self.pk) + _ipaddr.dns_name = "" + _ipaddr.custom_field_data.update({"dns_a_record_comment": ""}) + try: + _ipaddr.validated_save() + return super().delete() + except ValidationError as err: + self.diffsync.job.logger.warning( + f"Error with deleting A record data for IP Address: {self.address}/{self.prefix_length}-{self.namespace}. {err}" + ) + return None + + +class NautobotDnsHostRecord(DnsHostRecord): + """Nautobot implementation of the DnsHostRecord Model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Host Record data on IPAddress object in Nautobot.""" + addr_w_pfxl = f"{ids['address']}/{ids['prefix_length']}" + + if diffsync.config.dns_record_type != DNSRecordTypeChoices.HOST_RECORD: + diffsync.job.logger.warning( + f"Can't create/update Host record data for IP Address: {addr_w_pfxl}-{ids['namespace']}. Nautobot config is not set for Host record operations." # nosec: B608 + ) + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + + ip_pk = diffsync.ipaddr_map.get((addr_w_pfxl, ids["namespace"])) + if ip_pk: + if diffsync.job.debug: + diffsync.job.logger.debug( + f"Adding Host record data to an existing IP Address: {addr_w_pfxl}-{ids['namespace']}." + ) + _ipaddr = OrmIPAddress.objects.get(id=ip_pk) + _ipaddr.dns_name = attrs.get("dns_name") or "" + _ipaddr.custom_field_data.update({"dns_host_record_comment": attrs.get("description") or ""}) + try: + _ipaddr.validated_save() + except ValidationError as err: + diffsync.job.logger.warning( + f"Error with updating Host record data for IP Address: {addr_w_pfxl}-{ids['namespace']}. {err}" + ) + return None + else: + if diffsync.job.debug: + diffsync.job.logger.debug( + f"Creating IP Address from Host record data: {addr_w_pfxl}-{ids['namespace']}." + ) + try: + _ipaddr = _create_ip_address_common(diffsync, ids, attrs) + _ipaddr.dns_name = attrs.get("dns_name") or "" + _ipaddr.custom_field_data.update({"dns_host_record_comment": attrs.get("description") or ""}) + _ipaddr.validated_save() + diffsync.ipaddr_map[(addr_w_pfxl, ids["namespace"])] = _ipaddr.id + except ValidationError as err: + diffsync.job.logger.warning( + f"Error with creating IP Address from Host record data: {addr_w_pfxl}-{ids['namespace']}. {err}" + ) + return None + + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + + def update(self, attrs): + """Update Host Record data on IPAddress object in Nautobot.""" + if self.diffsync.config.dns_record_type != DNSRecordTypeChoices.HOST_RECORD: + self.diffsync.job.logger.warning( + f"Can't update Host record data for IP Address: {self.address}/{self.prefix_length}-{self.namespace}. Nautobot config is not set for Host record operations." # nosec: B608 + ) + return super().update(attrs) + + _ipaddr = OrmIPAddress.objects.get(id=self.pk) + if "dns_name" in attrs: + _ipaddr.dns_name = attrs["dns_name"] + if "description" in attrs: + _ipaddr.custom_field_data.update({"dns_host_record_comment": attrs.get("description") or ""}) + try: + _ipaddr.validated_save() + return super().update(attrs) + except ValidationError as err: + self.diffsync.job.logger.warning( + f"Error with updating Host record data for IP Address: {self.address}/{self.prefix_length}-{self.namespace}. 
{err}" + ) + return None + + def delete(self): + """Delete Host Record data on IPAddress object in Nautobot.""" + if NautobotDeletableModelChoices.DNS_HOST_RECORD not in self.diffsync.config.nautobot_deletable_models: + return super().delete() + + if self.diffsync.config.dns_record_type != DNSRecordTypeChoices.HOST_RECORD: + self.diffsync.job.logger.warning( + f"Can't delete Host record data for IP Address: {self.address}/{self.prefix_length}-{self.namespace}. Nautobot config is not set for Host record operations." + ) + return super().delete() + + # Parent record has been already deleted + if _get_ip_address_ds_key(self) not in self.diffsync.ipaddr_map: + return super().delete() + + _ipaddr = OrmIPAddress.objects.get(id=self.pk) + _ipaddr.dns_name = "" + _ipaddr.custom_field_data.update({"dns_host_record_comment": ""}) + try: + _ipaddr.validated_save() + return super().delete() + except ValidationError as err: + self.diffsync.job.logger.warning( + f"Error with deleting Host record data for IP Address: {self.address}/{self.prefix_length}-{self.namespace}. {err}" + ) + return None + + +class NautobotDnsPTRRecord(DnsPTRRecord): + """Nautobot implementation of the DnsPTRRecord Model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create PTR Record data on IPAddress object in Nautobot.""" + addr_w_pfxl = f"{ids['address']}/{ids['prefix_length']}" + + if diffsync.config.dns_record_type != DNSRecordTypeChoices.A_AND_PTR_RECORD: + diffsync.job.logger.warning( + f"Can't create/update PTR record data for IP Address: {addr_w_pfxl}-{ids['namespace']}. Nautobot config is not set for PTR record operations." # nosec: B608 + ) + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + + ip_pk = diffsync.ipaddr_map.get((addr_w_pfxl, ids["namespace"])) + if ip_pk: + if diffsync.job.debug: + diffsync.job.logger.debug( + f"Adding PTR record data to an existing IP Address: {addr_w_pfxl}-{ids['namespace']}." + ) + _ipaddr = OrmIPAddress.objects.get(id=ip_pk) + _ipaddr.dns_name = attrs.get("dns_name") or "" + _ipaddr.custom_field_data.update({"dns_ptr_record_comment": attrs.get("description") or ""}) + try: + _ipaddr.validated_save() + except ValidationError as err: + diffsync.job.logger.warning( + f"Error with updating PTR record data for IP Address: {addr_w_pfxl}-{ids['namespace']}. {err}" + ) + return None + else: + # We don't allow creating IPs from PTR record only + diffsync.job.logger.warning( + f"Can't create PTR record on its own. Associated A record must be created for IP Address: {addr_w_pfxl}-{ids['namespace']}." + ) + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + + def update(self, attrs): + """Update PTR Record data on IPAddress object in Nautobot.""" + if self.diffsync.config.dns_record_type != DNSRecordTypeChoices.A_AND_PTR_RECORD: + self.diffsync.job.logger.warning( + f"Can't update PTR record data for IP Address: {self.address}/{self.prefix_length}-{self.namespace}. Nautobot config is not set for PTR record operations." # nosec: B608 + ) + return super().update(attrs) + + _ipaddr = OrmIPAddress.objects.get(id=self.pk) + if "description" in attrs: + _ipaddr.custom_field_data.update({"dns_ptr_record_comment": attrs.get("description") or ""}) + try: + _ipaddr.validated_save() + return super().update(attrs) + except ValidationError as err: + self.diffsync.job.logger.warning( + f"Error with updating PTR record data for IP Address: {self.address}/{self.prefix_length}-{self.namespace}. 
{err}" + ) + return None + + def delete(self): + """Delete PTR Record data on IPAddress object in Nautobot.""" + if NautobotDeletableModelChoices.DNS_PTR_RECORD not in self.diffsync.config.nautobot_deletable_models: + return super().delete() + + if self.diffsync.config.dns_record_type != DNSRecordTypeChoices.A_AND_PTR_RECORD: + self.diffsync.job.logger.warning( + f"Can't delete PTR record data for IP Address: {self.address}/{self.prefix_length}-{self.namespace}. Nautobot config is not set for PTR record operations." + ) + return super().delete() + + # Parent record has been already deleted + if _get_ip_address_ds_key(self) not in self.diffsync.ipaddr_map: + return super().delete() + + _ipaddr = OrmIPAddress.objects.get(id=self.pk) + _ipaddr.custom_field_data.update({"dns_ptr_record_comment": ""}) + try: + _ipaddr.validated_save() + return super().delete() + except ValidationError as err: + self.diffsync.job.logger.warning( + f"Error with deleting PTR record data for IP Address: {self.address}/{self.prefix_length}-{self.namespace}. {err}" + ) + return None From a62ee55490e469f9e50a939a28b92a023f60850f Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 13 Jun 2024 19:15:45 +0100 Subject: [PATCH 151/229] Add fields for defining deletable infoblox and nautobot models. --- nautobot_ssot/integrations/infoblox/forms.py | 21 ++++++++++++++++-- nautobot_ssot/integrations/infoblox/models.py | 3 +++ .../ssotinfobloxconfig_retrieve.html | 22 +++++++++++++------ .../ssotinfobloxconfig_update.html | 4 +++- 4 files changed, 40 insertions(+), 10 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/forms.py b/nautobot_ssot/integrations/infoblox/forms.py index af0daeade..ae4566824 100644 --- a/nautobot_ssot/integrations/infoblox/forms.py +++ b/nautobot_ssot/integrations/infoblox/forms.py @@ -3,10 +3,15 @@ from django import forms from nautobot.extras.forms import NautobotModelForm, NautobotFilterForm -from nautobot.apps.forms import JSONField, StaticSelect2 +from nautobot.apps.forms import add_blank_choice, JSONField, StaticSelect2, StaticSelect2Multiple from .models import SSOTInfobloxConfig -from .choices import FixedAddressTypeChoices, DNSRecordTypeChoices +from .choices import ( + FixedAddressTypeChoices, + DNSRecordTypeChoices, + InfobloxDeletableModelChoices, + NautobotDeletableModelChoices, +) class SSOTInfobloxConfigForm(NautobotModelForm): # pylint: disable=too-many-ancestors @@ -36,6 +41,18 @@ class SSOTInfobloxConfigForm(NautobotModelForm): # pylint: disable=too-many-anc required=True, widget=StaticSelect2(), ) + infoblox_deletable_models = forms.MultipleChoiceField( + required=False, + label="Models that can be deleted in Infoblox", + choices=add_blank_choice(InfobloxDeletableModelChoices), + widget=StaticSelect2Multiple(), + ) + nautobot_deletable_models = forms.MultipleChoiceField( + required=False, + label="Models that can be deleted in Nautobot", + choices=add_blank_choice(NautobotDeletableModelChoices), + widget=StaticSelect2Multiple(), + ) class Meta: """Meta attributes for the SSOTInfobloxConfigForm class.""" diff --git a/nautobot_ssot/integrations/infoblox/models.py b/nautobot_ssot/integrations/infoblox/models.py index a98d47975..8838ac86f 100644 --- a/nautobot_ssot/integrations/infoblox/models.py +++ b/nautobot_ssot/integrations/infoblox/models.py @@ -99,7 +99,10 @@ class SSOTInfobloxConfig(PrimaryModel): # pylint: disable=too-many-ancestors job_enabled = models.BooleanField( default=False, verbose_name="Enabled for Sync Job", + help_text="Enable use of this configuration 
in the sync jobs.", ) + infoblox_deletable_models = models.JSONField(encoder=DjangoJSONEncoder, default=list, blank=True) + nautobot_deletable_models = models.JSONField(encoder=DjangoJSONEncoder, default=list, blank=True) class Meta: """Meta class for SSOTInfobloxConfig.""" diff --git a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html index cc13df31c..a20a317b2 100644 --- a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html +++ b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html @@ -44,28 +44,32 @@

             Infoblox WAPI Version
             {{ object.infoblox_wapi_version|placeholder }}
+
+            Can be used in Sync Job
+            {{ object.job_enabled }}
+
             Enable Sync from Nautobot to Infoblox
             {{ object.enable_sync_to_infoblox }}
-            Import IP Addresses from Infoblox
+            Import IP Addresses
             {{ object.import_ip_addresses }}
-            Import VLANs from Infoblox
+            Import VLANs
             {{ object.import_vlans }}
-            Import VLAN Views from Infoblox
+            Import VLAN Views
             {{ object.import_vlan_views }}
-            Import IPv4 from Infoblox
+            Import IPv4
             {{ object.import_ipv4 }}
-            Import IPv6 from Infoblox
+            Import IPv6
             {{ object.import_ipv6 }}
@@ -77,8 +81,12 @@

             {{ object.dns_record_type }}
-            Can be used in Sync Job
-            {{ object.job_enabled }}
+            Infoblox - deletable models
+            {{ object.infoblox_deletable_models }}
+
+            Nautobot - deletable models
+            {{ object.nautobot_deletable_models }}

diff --git a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_update.html b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_update.html
index 718e89b1d..5cfbb2079 100644
--- a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_update.html
+++ b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_update.html
@@ -9,6 +9,7 @@
                 {% render_field form.description %}
                 {% render_field form.infoblox_instance %}
                 {% render_field form.infoblox_wapi_version %}
+                {% render_field form.job_enabled %}
                 {% render_field form.enable_sync_to_infoblox %}
                 {% render_field form.import_ip_addresses %}
                 {% render_field form.import_subnets %}
@@ -18,8 +19,9 @@
                 {% render_field form.import_ipv6 %}
                 {% render_field form.fixed_address_type %}
                 {% render_field form.dns_record_type %}
+                {% render_field form.infoblox_deletable_models %}
+                {% render_field form.nautobot_deletable_models %}
                 {% render_field form.default_status %}
-                {% render_field form.job_enabled %}
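
Patches 150 and 151 together make deletion opt-in per model type: the multi-select choices from the form are stored in the two JSONField lists, and every `delete()` implementation checks its own model's entry before touching the ORM. A minimal usage sketch for the Nautobot side (the `InfobloxConfigDefault` name matches the default object created in patch 153's signals.py; the rest is illustrative usage, not code from these patches):

    from nautobot_ssot.integrations.infoblox.choices import NautobotDeletableModelChoices
    from nautobot_ssot.integrations.infoblox.models import SSOTInfobloxConfig

    # Opt in to VLAN and VLAN Group deletion when syncing to Nautobot; any
    # model type missing from the list is skipped by its delete() method.
    config = SSOTInfobloxConfig.objects.get(name="InfobloxConfigDefault")
    config.nautobot_deletable_models = [
        NautobotDeletableModelChoices.VLAN,
        NautobotDeletableModelChoices.VLAN_GROUP,
    ]
    config.validated_save()
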
    From 4574b00a5584827946f2240b8556acd7aac0ffa5 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 13 Jun 2024 19:17:04 +0100 Subject: [PATCH 152/229] Remove SKIP_UNMATCHED_DST flag. --- nautobot_ssot/integrations/infoblox/jobs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/jobs.py b/nautobot_ssot/integrations/infoblox/jobs.py index a32182bf8..a4e3fb443 100644 --- a/nautobot_ssot/integrations/infoblox/jobs.py +++ b/nautobot_ssot/integrations/infoblox/jobs.py @@ -56,7 +56,7 @@ class InfobloxDataSource(DataSource): def __init__(self): """Initialize InfobloxDataSource.""" super().__init__() - self.diffsync_flags = DiffSyncFlags.CONTINUE_ON_FAILURE | DiffSyncFlags.SKIP_UNMATCHED_DST + self.diffsync_flags = DiffSyncFlags.CONTINUE_ON_FAILURE class Meta: # pylint: disable=too-few-public-methods """Information about the Job.""" @@ -119,7 +119,7 @@ class InfobloxDataTarget(DataTarget): def __init__(self): """Initialize InfobloxDataTarget.""" super().__init__() - self.diffsync_flags = DiffSyncFlags.CONTINUE_ON_FAILURE | DiffSyncFlags.SKIP_UNMATCHED_DST + self.diffsync_flags = DiffSyncFlags.CONTINUE_ON_FAILURE class Meta: # pylint: disable=too-few-public-methods """Information about the Job.""" From 13aae538a262b041410087f11f690cc25f2adf79 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 13 Jun 2024 19:17:39 +0100 Subject: [PATCH 153/229] Add custom fields for DNS record comments. --- .../integrations/infoblox/signals.py | 45 +++++++++++++++---- 1 file changed, 37 insertions(+), 8 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/signals.py b/nautobot_ssot/integrations/infoblox/signals.py index 807777d36..28b8783ed 100644 --- a/nautobot_ssot/integrations/infoblox/signals.py +++ b/nautobot_ssot/integrations/infoblox/signals.py @@ -24,7 +24,9 @@ def register_signals(sender): nautobot_database_ready.connect(nautobot_database_ready_callback, sender=sender) -def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disable=unused-argument,too-many-locals +def nautobot_database_ready_callback( + sender, *, apps, **kwargs +): # pylint: disable=unused-argument,too-many-locals,too-many-statements """Create Tag and CustomField to note System of Record for SSoT. Callback function triggered by the nautobot_database_ready signal when the Nautobot database is fully ready. 
@@ -84,14 +86,14 @@ def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disa ) range_custom_field.content_types.add(ContentType.objects.get_for_model(Prefix)) - mac_address_comment_custom_field, _ = CustomField.objects.get_or_create( + mac_address_custom_field, _ = CustomField.objects.get_or_create( type=CustomFieldTypeChoices.TYPE_TEXT, key="mac_address", defaults={ "label": "MAC Address", }, ) - mac_address_comment_custom_field.content_types.add(ContentType.objects.get_for_model(IPAddress)) + mac_address_custom_field.content_types.add(ContentType.objects.get_for_model(IPAddress)) fixed_address_comment_custom_field, _ = CustomField.objects.get_or_create( type=CustomFieldTypeChoices.TYPE_TEXT, @@ -102,6 +104,33 @@ def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disa ) fixed_address_comment_custom_field.content_types.add(ContentType.objects.get_for_model(IPAddress)) + dns_a_record_comment_custom_field, _ = CustomField.objects.get_or_create( + type=CustomFieldTypeChoices.TYPE_TEXT, + key="dns_a_record_comment", + defaults={ + "label": "DNS A Record Comment", + }, + ) + dns_a_record_comment_custom_field.content_types.add(ContentType.objects.get_for_model(IPAddress)) + + dns_host_record_comment_custom_field, _ = CustomField.objects.get_or_create( + type=CustomFieldTypeChoices.TYPE_TEXT, + key="dns_host_record_comment", + defaults={ + "label": "DNS Host Record Comment", + }, + ) + dns_host_record_comment_custom_field.content_types.add(ContentType.objects.get_for_model(IPAddress)) + + dns_ptr_record_comment_custom_field, _ = CustomField.objects.get_or_create( + type=CustomFieldTypeChoices.TYPE_TEXT, + key="dns_ptr_record_comment", + defaults={ + "label": "DNS PTR Record Comment", + }, + ) + dns_ptr_record_comment_custom_field.content_types.add(ContentType.objects.get_for_model(IPAddress)) + # add Prefix -> VLAN Relationship relationship_dict = { "label": "Prefix -> VLAN", @@ -130,16 +159,16 @@ def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disa infoblox_sync_filters = _get_sync_filters() - secrets_group, _ = SecretsGroup.objects.get_or_create(name="InfobloxSSOTMigration") + secrets_group, _ = SecretsGroup.objects.get_or_create(name="InfobloxSSOTDefaultSecretGroup") infoblox_username, _ = Secret.objects.get_or_create( - name="Infoblox Username - SSOT Migration", + name="Infoblox Username - Default", defaults={ "provider": "environment-variable", "parameters": {"variable": "NAUTOBOT_SSOT_INFOBLOX_USERNAME"}, }, ) infoblox_password, _ = Secret.objects.get_or_create( - name="Infoblox Password - SSOT Migration", + name="Infoblox Password - Default", defaults={ "provider": "environment-variable", "parameters": {"variable": "NAUTOBOT_SSOT_INFOBLOX_PASSWORD"}, @@ -162,7 +191,7 @@ def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disa }, ) external_integration, _ = ExternalIntegration.objects.get_or_create( - name="MigratedInfobloxInstance", + name="DefaultInfobloxInstance", defaults={ "remote_url": str(config.get("infoblox_url", "https://replace.me.local")), "secrets_group": secrets_group, @@ -173,7 +202,7 @@ def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disa SSOTInfobloxConfig.objects.create( name="InfobloxConfigDefault", - description="Config generated from the migrated legacy settings.", + description="Auto-generated default configuration.", default_status=default_status, infoblox_wapi_version=str(config.get("infoblox_wapi_version", "v2.12")), 
infoblox_instance=external_integration, From 80f2e80dd82472c155d7ff9d9edf6e334f1f4e83 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 13 Jun 2024 19:18:10 +0100 Subject: [PATCH 154/229] Add methods for deleting DNS records. --- .../integrations/infoblox/utils/client.py | 76 ++++++++++++++++++- 1 file changed, 74 insertions(+), 2 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/utils/client.py b/nautobot_ssot/integrations/infoblox/utils/client.py index 7cdaa23c6..e72e392bd 100644 --- a/nautobot_ssot/integrations/infoblox/utils/client.py +++ b/nautobot_ssot/integrations/infoblox/utils/client.py @@ -706,7 +706,7 @@ def get_host_record_by_ref(self, ref: str): """ url_path = f"{ref}" params = { - "_return_fields": "name,view,ipv4addr,comment", + "_return_fields": "name,view,ipv4addrs,comment", } response = self._request("GET", path=url_path, params=params) logger.error(response.text) @@ -812,7 +812,7 @@ def get_a_record_by_ref(self, ref: str): """ url_path = f"{ref}" params = { - "_return_fields": "name,view,ipv4addr,comment", + "_return_fields": "name,view,ipv4addr,comment,extattrs", } response = self._request("GET", path=url_path, params=params) logger.error(response.text) @@ -823,6 +823,24 @@ def get_a_record_by_ref(self, ref: str): logger.error(response.text) return response.text + def delete_a_record_by_ref(self, ref): + """Delete DNS A record by ref. + + Args: + ref (str): reference to the DNS A record + + Returns: + (dict) deleted DNS A record. + + Returns Response: + {"deleted": "record:a/ZG5zLmJpbmRfYSQuX2RlZmF1bHQudGVzdCx0ZXN0ZGV2aWNlMSwxMC4yMjAuMC4xMDE:testdevice1.test/default"} + """ + self._delete(ref) + response = {"deleted": ref} + + logger.debug(response) + return response + def get_ptr_record_by_ref(self, ref: str): """Get the PTR record by FQDN. @@ -937,6 +955,24 @@ def get_ptr_record_by_name(self, fqdn, network_view: Optional[str] = None): logger.error(response.text) return response.text + def delete_ptr_record_by_ref(self, ref): + """Delete DNS PTR record by ref. + + Args: + ref (str): reference to the DNS PTR record + + Returns: + (dict) deleted DNS PTR record. + + Returns Response: + {"deleted": "record:ptr/ZG5zLmJpbmRfYSQuX2RlZmF1bHQudGVzdCx0ZXN0ZGV2aWNlMSwxMC4yMjAuMC4xMDE:testdevice1.test/default"} + """ + self._delete(ref) + response = {"deleted": ref} + + logger.debug(response) + return response + def get_all_dns_views(self): """Get all dns views. @@ -1393,6 +1429,24 @@ def get_fixed_address_by_ref(self, ref: str): logger.error(response.text) return response.text + def delete_fixed_address_record_by_ref(self, ref): + """Delete Fixed Address record by ref. + + Args: + ref (str): reference to the fixed address record + + Returns: + (dict) deleted fixed address record. + + Returns Response: + {"deleted": "fixedaddress/ZG5zLmZpeGVkX2FkZHJlc3MkMTAuMC4wLjIuMi4u:10.0.0.2/dev"} + """ + self._delete(ref) + response = {"deleted": ref} + + logger.debug(response) + return response + def reserve_fixed_address(self, network, mac_address, network_view: Optional[str] = None): """Reserve the next available ip address for a given network range. @@ -1584,6 +1638,24 @@ def delete_host_record(self, ip_address, network_view: Optional[str] = None): logger.debug(response) return response + def delete_host_record_by_ref(self, ref): + """Delete DNS Host record by ref. + + Args: + ref (str): reference to the DNS Host record + + Returns: + (dict) deleted DNS Host record. 
+ + Returns Response: + {"deleted": "record:host/ZG5zLmhvc3QkLl9kZWZhdWx0LnRlc3QudGVzdGRldmljZTE:testdevice1.test/default"} + """ + self._delete(ref) + response = {"deleted": ref} + + logger.debug(response) + return response + def create_ptr_record(self, fqdn, ip_address, comment: Optional[str] = None, network_view: Optional[str] = None): """Create a PTR record for a given FQDN. From 378fe96ba122d218cdf555762601276cdf9122b1 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 13 Jun 2024 19:19:23 +0100 Subject: [PATCH 155/229] Add cfs to exclusion lists. --- .../integrations/infoblox/utils/diffsync.py | 20 +++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/utils/diffsync.py b/nautobot_ssot/integrations/infoblox/utils/diffsync.py index 5847e1a22..d96e4c7ba 100644 --- a/nautobot_ssot/integrations/infoblox/utils/diffsync.py +++ b/nautobot_ssot/integrations/infoblox/utils/diffsync.py @@ -98,7 +98,15 @@ def get_valid_custom_fields(cfs: dict, excluded_cfs: Optional[list] = None): """ if excluded_cfs is None: excluded_cfs = [] - default_excluded_cfs = ["dhcp_ranges", "fixed_address_comment", "mac_address", "ssot_synced_to_infoblox"] + default_excluded_cfs = [ + "dhcp_ranges", + "dns_a_record_comment", + "dns_host_record_comment", + "dns_ptr_record_comment", + "fixed_address_comment", + "mac_address", + "ssot_synced_to_infoblox", + ] excluded_cfs.extend(default_excluded_cfs) valid_cfs = {} for cf_name, val in cfs.items(): @@ -122,7 +130,15 @@ def get_default_custom_fields(cf_contenttype: ContentType, excluded_cfs: Optiona excluded_cfs = [] customfields = CustomField.objects.filter(content_types=cf_contenttype) # These cfs are always excluded - default_excluded_cfs = ["dhcp_ranges", "fixed_address_comment", "mac_address", "ssot_synced_to_infoblox"] + default_excluded_cfs = [ + "dhcp_ranges", + "dns_a_record_comment", + "dns_host_record_comment", + "dns_ptr_record_comment", + "fixed_address_comment", + "mac_address", + "ssot_synced_to_infoblox", + ] # User defined excluded cfs excluded_cfs.extend(default_excluded_cfs) default_cfs = {} From 2c585f6212797e7a0c8bcd09228e95141da1f94d Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 13 Jun 2024 19:19:47 +0100 Subject: [PATCH 156/229] Add deletable models fields. 
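
Note: the diff below regenerates migration 0009 in place (the header timestamp moves from 2024-06-04 to 2024-06-13), which assumes the original 0009 has not shipped anywhere yet. Against a database that had already applied it, the same schema change would instead be a follow-up migration with two AddField operations, sketched here for reference only (hypothetical, not part of this patch):

    import django.core.serializers.json
    from django.db import migrations, models


    class Migration(migrations.Migration):
        dependencies = [("nautobot_ssot", "0009_ssotconfig_ssotinfobloxconfig")]

        operations = [
            migrations.AddField(
                model_name="ssotinfobloxconfig",
                name="infoblox_deletable_models",
                field=models.JSONField(
                    blank=True, default=list, encoder=django.core.serializers.json.DjangoJSONEncoder
                ),
            ),
            migrations.AddField(
                model_name="ssotinfobloxconfig",
                name="nautobot_deletable_models",
                field=models.JSONField(
                    blank=True, default=list, encoder=django.core.serializers.json.DjangoJSONEncoder
                ),
            ),
        ]
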
--- .../migrations/0009_ssotconfig_ssotinfobloxconfig.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/nautobot_ssot/migrations/0009_ssotconfig_ssotinfobloxconfig.py b/nautobot_ssot/migrations/0009_ssotconfig_ssotinfobloxconfig.py index a5dafbca6..6d4e7de70 100644 --- a/nautobot_ssot/migrations/0009_ssotconfig_ssotinfobloxconfig.py +++ b/nautobot_ssot/migrations/0009_ssotconfig_ssotinfobloxconfig.py @@ -1,4 +1,4 @@ -# Generated by Django 3.2.23 on 2024-06-04 16:04 +# Generated by Django 3.2.23 on 2024-06-13 18:08 import django.core.serializers.json from django.db import migrations, models @@ -74,6 +74,14 @@ class Migration(migrations.Migration): ("dns_record_type", models.CharField(default="create-host-record", max_length=255)), ("fixed_address_type", models.CharField(default="do-not-create-record", max_length=255)), ("job_enabled", models.BooleanField(default=False)), + ( + "infoblox_deletable_models", + models.JSONField(blank=True, default=list, encoder=django.core.serializers.json.DjangoJSONEncoder), + ), + ( + "nautobot_deletable_models", + models.JSONField(blank=True, default=list, encoder=django.core.serializers.json.DjangoJSONEncoder), + ), ("default_status", models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to="extras.status")), ( "infoblox_instance", From 64b4073b091f5dee55c76c84b8cb2bfab2f41cd6 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 13 Jun 2024 19:20:03 +0100 Subject: [PATCH 157/229] Update fixtures. --- .../tests/infoblox/fixtures/get_fixed_address_by_ref.json | 1 + nautobot_ssot/tests/infoblox/fixtures/get_host_by_ref.json | 1 + 2 files changed, 2 insertions(+) diff --git a/nautobot_ssot/tests/infoblox/fixtures/get_fixed_address_by_ref.json b/nautobot_ssot/tests/infoblox/fixtures/get_fixed_address_by_ref.json index 078e774d5..b3d8027f8 100644 --- a/nautobot_ssot/tests/infoblox/fixtures/get_fixed_address_by_ref.json +++ b/nautobot_ssot/tests/infoblox/fixtures/get_fixed_address_by_ref.json @@ -3,6 +3,7 @@ "extattrs": { }, + "ipv4addr": "10.0.0.2", "mac": "52:1f:83:d4:9a:2e", "name": "host-fixed1", "network": "10.0.0.0/24", diff --git a/nautobot_ssot/tests/infoblox/fixtures/get_host_by_ref.json b/nautobot_ssot/tests/infoblox/fixtures/get_host_by_ref.json index 52b63c16d..f04532ddd 100644 --- a/nautobot_ssot/tests/infoblox/fixtures/get_host_by_ref.json +++ b/nautobot_ssot/tests/infoblox/fixtures/get_host_by_ref.json @@ -1,5 +1,6 @@ { "_ref": "record:host/ZG5zLmhvc3QkLl9kZWZhdWx0LnRlc3QudGVzdGRldmljZTE:testdevice1.test/default", + "ipv4addr": "10.220.0.101", "ipv4addrs": [ { "_ref": "record:host_ipv4addr/ZG5zLmhvc3RfYWRkcmVzcyQuX2RlZmF1bHQudGVzdC50ZXN0ZGV2aWNlMS4xMC4yMjAuMC4xMDEu:10.220.0.101/testdevice1.test/default", From 4fa4d4a4c5277ec5fedd5f45b62244ddca709326 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 13 Jun 2024 19:20:19 +0100 Subject: [PATCH 158/229] Update tests. 
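
The reworked adapter tests all follow one arrangement: patch InfobloxApi with autospec, feed canned WAPI payloads through the mocked connection, run the relevant load method, then assert on what landed in the DiffSync store. A condensed sketch of that pattern follows; `config`, `fixed_address_row`, and `fixed_address_detail` stand in for the config fixture and the canned payload dicts visible in the diff below:

    import unittest.mock
    from unittest.mock import Mock

    from nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox import InfobloxAdapter

    with unittest.mock.patch(
        "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True
    ) as mock_client:
        adapter = InfobloxAdapter(job=Mock(), sync=Mock(), conn=mock_client, config=config)
        # Stub the connection with canned WAPI responses.
        adapter.conn.get_ipaddr_status.return_value = "Active"
        adapter.conn.get_all_ipv4address_networks.side_effect = [[fixed_address_row]]
        adapter.conn.get_fixed_address_by_ref.return_value = fixed_address_detail
        # Load, then assert against the DiffSync store.
        adapter.load_ipaddresses()
        ip_address = adapter.get(
            "ipaddress",
            {"address": "10.0.0.2", "prefix": "10.0.0.0/24", "prefix_length": 24, "namespace": "dev"},
        )
        assert ip_address.fixed_address_comment == "fa server"
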
--- .../tests/infoblox/test_infoblox_adapter.py | 338 ++- .../tests/infoblox/test_infoblox_models.py | 1983 +++++------------ .../tests/infoblox/test_nautobot_adapter.py | 52 + .../tests/infoblox/test_nautobot_models.py | 589 ++++- .../tests/infoblox/test_tags_and_cfs.py | 7 +- 5 files changed, 1499 insertions(+), 1470 deletions(-) diff --git a/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py b/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py index 073bde971..48f23252f 100644 --- a/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py +++ b/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py @@ -2,6 +2,7 @@ import unittest +from nautobot_ssot.integrations.infoblox.choices import FixedAddressTypeChoices from nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox import InfobloxAdapter from .fixtures_infoblox import create_default_infoblox_config @@ -11,7 +12,7 @@ class TestInfobloxAdapter(unittest.TestCase): """Test cases for InfobloxAdapter.""" def setUp(self): - config = create_default_infoblox_config() + self.config = create_default_infoblox_config() with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: @@ -19,7 +20,7 @@ def setUp(self): job=unittest.mock.Mock(), sync=unittest.mock.Mock(), conn=mock_client, - config=config, + config=self.config, ) @unittest.mock.patch( @@ -337,3 +338,336 @@ def test_load_prefixes_ipv6_subnets( self.assertEqual(ipv6_subnet.ext_attrs, {"attr1": "data"}) self.assertEqual(ipv6_subnet.vlans, {}) self.assertEqual(ipv6_subnet.ranges, []) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox.get_default_ext_attrs", + autospec=True, + return_value={}, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox.get_ext_attr_dict", + autospec=True, + side_effect=[{}], + ) + def test_load_ip_addresses_fixed_only( + self, + mock_extra_attr_dict, + mock_default_extra_attrs, + ): + """Test loading IP Addresses with one fixed address only.""" + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + infoblox_adapter = InfobloxAdapter( + job=unittest.mock.Mock(), + sync=unittest.mock.Mock(), + conn=mock_client, + config=self.config, + ) + infoblox_adapter.conn.get_ipaddr_status.return_value = "Active" + infoblox_adapter.conn.get_all_ipv4address_networks.side_effect = [ + [ + { + "_ref": "ipv4address/Li5pcHY0X2FkZHJlc3MkMTAuMjIwLjAuMTAwLzA:10.220.0.100", + "extattrs": {"Usage": {"value": "TACACS"}}, + "ip_address": "10.0.0.2", + "is_conflict": "false", + "lease_state": "FREE", + "mac_address": "", + "names": [], + "network": "10.0.0.0/24", + "network_view": "dev", + "objects": ["fixedaddress/ZG5zLmZpeGVkX2FkZHJlc3MkMTAuMC4wLjIuMi4u:10.0.0.2/dev"], + "status": "USED", + "types": ["RESERVATION"], + "usage": ["DHCP"], + }, + ] + ] + infoblox_adapter.conn.get_fixed_address_by_ref.return_value = { + "_ref": "fixedaddress/ZG5zLmZpeGVkX2FkZHJlc3MkMTAuMC4wLjIuMi4u:10.0.0.2/dev", + "ipv4addr": "10.0.0.2", + "extattrs": {}, + "name": "fa-server1", + "comment": "fa server", + "network": "10.0.0.0/24", + "network_view": "dev", + } + infoblox_adapter.load_ipaddresses() + ip_address = infoblox_adapter.get( + "ipaddress", + {"address": "10.0.0.2", "prefix": "10.0.0.0/24", "prefix_length": 24, "namespace": "dev"}, + ) + + self.assertEqual("10.0.0.2", ip_address.address) + 
self.assertEqual("10.0.0.0/24", ip_address.prefix) + self.assertEqual(24, ip_address.prefix_length) + self.assertEqual("dev", ip_address.namespace) + self.assertEqual("fa-server1", ip_address.description) + self.assertEqual("dhcp", ip_address.ip_addr_type) + self.assertEqual({}, ip_address.ext_attrs) + self.assertEqual("", ip_address.mac_address) + self.assertEqual("fa server", ip_address.fixed_address_comment) + self.assertEqual(False, ip_address.has_a_record) + self.assertEqual(False, ip_address.has_ptr_record) + self.assertEqual(False, ip_address.has_host_record) + + mock_default_extra_attrs.assert_called_once() + self.assertEqual(mock_extra_attr_dict.call_count, 1) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox.get_default_ext_attrs", + autospec=True, + return_value={}, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox.get_ext_attr_dict", + autospec=True, + side_effect=[{}, {}, {}], + ) + def test_load_ip_addresses_fixed_dns_a_dns_ptr( # pylint: disable=too-many-statements + self, + mock_extra_attr_dict, + mock_default_extra_attrs, + ): + """Test loading IP Addresses with one fixed address, one A record and one PTR record.""" + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + infoblox_adapter = InfobloxAdapter( + job=unittest.mock.Mock(), + sync=unittest.mock.Mock(), + conn=mock_client, + config=self.config, + ) + infoblox_adapter.conn.get_ipaddr_status.return_value = "Active" + infoblox_adapter.conn.get_all_ipv4address_networks.side_effect = [ + [ + { + "_ref": "ipv4address/Li5pcHY0X2FkZHJlc3MkMTAuMC4wLjQvMg:10.0.0.4/dev", + "ip_address": "10.0.0.4", + "is_conflict": "false", + "mac_address": "", + "names": ["fa1 add", "server11.nautobot.local.test"], + "network": "10.0.0.0/24", + "network_view": "dev", + "objects": [ + "fixedaddress/ZG5zLmZpeGVkX2FkZHJlc3MkMTAuMC4wLjIuMi4u:10.0.0.4/dev", + "record:a/ZG5zLmJpbmRfYSQuMi50ZXN0LmxvY2FsLm5hdXRvYm90LHNlcnZlcjExLDEwLjAuMC40:server11.nautobot.local.test/default.dev", + "record:ptr/ZG5zLmJpbmRfcHRyJC4yLmFycGEuaW4tYWRkci4xMC4wLjAuNC5zZXJ2ZXIxMS5uYXV0b2JvdC5sb2NhbC50ZXN0:4.0.0.10.in-addr.arpa/default.dev", + ], + "status": "USED", + "types": ["RESERVATION", "A", "PTR"], + "usage": ["DHCP", "DNS"], + } + ] + ] + infoblox_adapter.conn.get_fixed_address_by_ref.return_value = { + "_ref": "fixedaddress/ZG5zLmZpeGVkX2FkZHJlc3MkMTAuMC4wLjIuMi4u:10.0.0.4/dev", + "ipv4addr": "10.0.0.4", + "extattrs": {}, + "name": "fa-server1", + "comment": "fa server", + "network": "10.0.0.0/24", + "network_view": "dev", + } + infoblox_adapter.conn.get_a_record_by_ref.return_value = { + "_ref": "record:a/ZG5zLmJpbmRfYSQuMi50ZXN0LmxvY2FsLm5hdXRvYm90LHNlcnZlcjExLDEwLjAuMC40:server11.nautobot.local.test/default.dev", + "ipv4addr": "10.0.0.4", + "name": "server11.nautobot.local.test", + "comment": "a record comment", + "view": "default", + } + infoblox_adapter.conn.get_ptr_record_by_ref.return_value = { + "_ref": "record:ptr/ZG5zLmJpbmRfcHRyJC4yLmFycGEuaW4tYWRkci4xMC4wLjAuNC5zZXJ2ZXIxMS5uYXV0b2JvdC5sb2NhbC50ZXN0:4.0.0.10.in-addr.arpa/default.dev", + "ipv4addr": "10.0.0.4", + "ipv6addr": "", + "name": "4.0.0.10.in-addr.arpa", + "ptrdname": "server11.nautobot.local.test", + "comment": "ptr record comment", + "view": "default.dev", + } + infoblox_adapter.load_ipaddresses() + ip_address = infoblox_adapter.get( + "ipaddress", + {"address": 
"10.0.0.4", "prefix": "10.0.0.0/24", "prefix_length": 24, "namespace": "dev"}, + ) + self.assertEqual("10.0.0.4", ip_address.address) + self.assertEqual("10.0.0.0/24", ip_address.prefix) + self.assertEqual(24, ip_address.prefix_length) + self.assertEqual("dev", ip_address.namespace) + self.assertEqual("Active", ip_address.status) + self.assertEqual("fa-server1", ip_address.description) + self.assertEqual("dhcp", ip_address.ip_addr_type) + self.assertEqual({}, ip_address.ext_attrs) + self.assertEqual("", ip_address.mac_address) + self.assertEqual(True, ip_address.has_fixed_address) + self.assertEqual("fa server", ip_address.fixed_address_comment) + self.assertEqual("RESERVED", ip_address.fixed_address_type) + self.assertEqual( + "fixedaddress/ZG5zLmZpeGVkX2FkZHJlc3MkMTAuMC4wLjIuMi4u:10.0.0.4/dev", ip_address.fixed_address_ref + ) + self.assertEqual(True, ip_address.has_a_record) + self.assertEqual(True, ip_address.has_ptr_record) + self.assertEqual(False, ip_address.has_host_record) + + a_record = infoblox_adapter.get( + "dnsarecord", + {"address": "10.0.0.4", "prefix": "10.0.0.0/24", "prefix_length": 24, "namespace": "dev"}, + ) + self.assertEqual("10.0.0.4", a_record.address) + self.assertEqual("10.0.0.0/24", a_record.prefix) + self.assertEqual(24, a_record.prefix_length) + self.assertEqual("dev", a_record.namespace) + self.assertEqual("Active", a_record.status) + self.assertEqual("a record comment", a_record.description) + self.assertEqual("dhcp", a_record.ip_addr_type) + self.assertEqual({}, a_record.ext_attrs) + self.assertEqual("server11.nautobot.local.test", a_record.dns_name) + self.assertEqual( + "record:a/ZG5zLmJpbmRfYSQuMi50ZXN0LmxvY2FsLm5hdXRvYm90LHNlcnZlcjExLDEwLjAuMC40:server11.nautobot.local.test/default.dev", + a_record.ref, + ) + + ptr_record = infoblox_adapter.get( + "dnsptrrecord", + {"address": "10.0.0.4", "prefix": "10.0.0.0/24", "prefix_length": 24, "namespace": "dev"}, + ) + self.assertEqual("10.0.0.4", ptr_record.address) + self.assertEqual("10.0.0.0/24", ptr_record.prefix) + self.assertEqual(24, ptr_record.prefix_length) + self.assertEqual("dev", ptr_record.namespace) + self.assertEqual("Active", ptr_record.status) + self.assertEqual("ptr record comment", ptr_record.description) + self.assertEqual("dhcp", ptr_record.ip_addr_type) + self.assertEqual({}, ptr_record.ext_attrs) + self.assertEqual("server11.nautobot.local.test", ptr_record.dns_name) + self.assertEqual( + "record:ptr/ZG5zLmJpbmRfcHRyJC4yLmFycGEuaW4tYWRkci4xMC4wLjAuNC5zZXJ2ZXIxMS5uYXV0b2JvdC5sb2NhbC50ZXN0:4.0.0.10.in-addr.arpa/default.dev", + ptr_record.ref, + ) + + mock_default_extra_attrs.assert_called_once() + self.assertEqual(mock_extra_attr_dict.call_count, 3) + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox.get_default_ext_attrs", + autospec=True, + return_value={}, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox.get_ext_attr_dict", + autospec=True, + side_effect=[{}, {}], + ) + def test_load_ip_addresses_fixed_dns_host( + self, + mock_extra_attr_dict, + mock_default_extra_attrs, + ): + """Test loading IP Addresses with one fixed address and one Host record.""" + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + infoblox_adapter = InfobloxAdapter( + job=unittest.mock.Mock(), + sync=unittest.mock.Mock(), + conn=mock_client, + config=self.config, + ) + 
infoblox_adapter.conn.get_ipaddr_status.return_value = "Active" + infoblox_adapter.conn.get_all_ipv4address_networks.side_effect = [ + [ + { + "_ref": "ipv4address/Li5pcHY0X2FkZHJlc3MkMTAuMC4wLjMvMg:10.0.0.4/dev", + "ip_address": "10.0.0.4", + "is_conflict": "false", + "mac_address": "", + "names": ["server1.nautobot.local.test"], + "network": "10.0.0.0/24", + "network_view": "dev", + "objects": [ + "fixedaddress/ZG5zLmZpeGVkX2FkZHJlc3MkMTAuMC4wLjQuMi4u:10.0.0.4/dev", + "record:host/ZG5zLmhvc3QkLjIudGVzdC5sb2NhbC5uYXV0b2JvdC5zZXJ2ZXIx:server1.nautobot.local.test/default.dev", + ], + "status": "USED", + "types": ["HOST", "RESERVATION"], + "usage": [ + "DHCP", + "DNS", + ], + } + ] + ] + infoblox_adapter.conn.get_fixed_address_by_ref.return_value = { + "_ref": "fixedaddress/ZG5zLmZpeGVkX2FkZHJlc3MkMTAuMC4wLjQuMi4u:10.0.0.4/dev", + "ipv4addr": "10.0.0.4", + "extattrs": {}, + "name": "fa-server1", + "comment": "fa server", + "network": "10.0.0.0/24", + "network_view": "dev", + } + infoblox_adapter.conn.get_host_record_by_ref.return_value = { + "_ref": "record:host/ZG5zLmhvc3QkLl9kZWZhdWx0LnRlc3QudGVzdGRldmljZTE:testdevice1.test/default", + "ipv4addr": "10.0.0.4", + "ipv4addrs": [ + { + "_ref": "record:host/ZG5zLmhvc3QkLjIudGVzdC5sb2NhbC5uYXV0b2JvdC5zZXJ2ZXIx:server1.nautobot.local.test/default.dev", + "configure_for_dhcp": "true", + "host": "server1.nautobot.local.test", + "ipv4addr": "10.0.0.4", + "mac": "", + } + ], + "name": "server1.nautobot.local.test", + "view": "default", + "comment": "host record comment", + } + infoblox_adapter.load_ipaddresses() + ip_address = infoblox_adapter.get( + "ipaddress", + {"address": "10.0.0.4", "prefix": "10.0.0.0/24", "prefix_length": 24, "namespace": "dev"}, + ) + self.assertEqual("10.0.0.4", ip_address.address) + self.assertEqual("10.0.0.0/24", ip_address.prefix) + self.assertEqual(24, ip_address.prefix_length) + self.assertEqual("dev", ip_address.namespace) + self.assertEqual("Active", ip_address.status) + self.assertEqual("fa-server1", ip_address.description) + self.assertEqual("dhcp", ip_address.ip_addr_type) + self.assertEqual({}, ip_address.ext_attrs) + self.assertEqual("", ip_address.mac_address) + self.assertEqual(True, ip_address.has_fixed_address) + self.assertEqual("fa server", ip_address.fixed_address_comment) + self.assertEqual("RESERVED", ip_address.fixed_address_type) + self.assertEqual( + "fixedaddress/ZG5zLmZpeGVkX2FkZHJlc3MkMTAuMC4wLjQuMi4u:10.0.0.4/dev", ip_address.fixed_address_ref + ) + self.assertEqual(False, ip_address.has_a_record) + self.assertEqual(False, ip_address.has_ptr_record) + self.assertEqual(True, ip_address.has_host_record) + + host_record = infoblox_adapter.get( + "dnshostrecord", + {"address": "10.0.0.4", "prefix": "10.0.0.0/24", "prefix_length": 24, "namespace": "dev"}, + ) + self.assertEqual("10.0.0.4", host_record.address) + self.assertEqual("10.0.0.0/24", host_record.prefix) + self.assertEqual(24, host_record.prefix_length) + self.assertEqual("dev", host_record.namespace) + self.assertEqual("Active", host_record.status) + self.assertEqual("host record comment", host_record.description) + self.assertEqual("dhcp", host_record.ip_addr_type) + self.assertEqual({}, host_record.ext_attrs) + self.assertEqual("server1.nautobot.local.test", host_record.dns_name) + self.assertEqual( + "record:host/ZG5zLmhvc3QkLjIudGVzdC5sb2NhbC5uYXV0b2JvdC5zZXJ2ZXIx:server1.nautobot.local.test/default.dev", + host_record.ref, + ) + + mock_default_extra_attrs.assert_called_once() + self.assertEqual(mock_extra_attr_dict.call_count, 2) diff 
--git a/nautobot_ssot/tests/infoblox/test_infoblox_models.py b/nautobot_ssot/tests/infoblox/test_infoblox_models.py index 4b8dc01d5..c31d7e9d5 100644 --- a/nautobot_ssot/tests/infoblox/test_infoblox_models.py +++ b/nautobot_ssot/tests/infoblox/test_infoblox_models.py @@ -5,7 +5,11 @@ from django.test import TestCase -from nautobot_ssot.integrations.infoblox.choices import DNSRecordTypeChoices, FixedAddressTypeChoices +from nautobot_ssot.integrations.infoblox.choices import ( + DNSRecordTypeChoices, + FixedAddressTypeChoices, + InfobloxDeletableModelChoices, +) from nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox import InfobloxAdapter from nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot import NautobotAdapter @@ -20,15 +24,65 @@ def _get_ip_address_dict(attrs): "status": "Active", "prefix": "10.0.0.0/8", "prefix_length": 8, - "ip_addr_type": "host", + "ip_addr_type": "dhcp", "namespace": "Global", - "dns_name": "", } ipaddress_dict.update(attrs) return ipaddress_dict +def _get_dns_a_record_dict(attrs): + """Build dict used for creating diffsync DNS A record.""" + dns_a_record_dict = { + "description": "Test A Record", + "address": "10.0.0.1", + "status": "Active", + "prefix": "10.0.0.0/8", + "prefix_length": 8, + "dns_name": "server1.local.test.net", + "ip_addr_type": "host", + "namespace": "Global", + } + dns_a_record_dict.update(attrs) + + return dns_a_record_dict + + +def _get_dns_ptr_record_dict(attrs): + """Build dict used for creating diffsync DNS PTR record.""" + dns_ptr_record_dict = { + "description": "Test PTR Record", + "address": "10.0.0.1", + "status": "Active", + "prefix": "10.0.0.0/8", + "prefix_length": 8, + "dns_name": "server1.local.test.net", + "ip_addr_type": "host", + "namespace": "Global", + } + dns_ptr_record_dict.update(attrs) + + return dns_ptr_record_dict + + +def _get_dns_host_record_dict(attrs): + """Build dict used for creating diffsync DNS Host record.""" + dns_host_record_dict = { + "description": "Test Host Record", + "address": "10.0.0.1", + "status": "Active", + "prefix": "10.0.0.0/8", + "prefix_length": 8, + "dns_name": "server1.local.test.net", + "ip_addr_type": "host", + "namespace": "Global", + } + dns_host_record_dict.update(attrs) + + return dns_host_record_dict + + def _get_network_dict(attrs): """Build dict used for creating diffsync network.""" network_dict = { @@ -142,8 +196,8 @@ def test_network_update_network(self, mock_tag_involved_objects): mock_tag_involved_objects.assert_called_once() -class TestModelInfobloxIPAddressCreate(TestCase): - """Tests correct Fixed Address and DNS record are created.""" +class TestModelInfobloxIPAddress(TestCase): + """Tests Fixed Address record operations.""" def setUp(self): "Test class set up." 
@@ -151,18 +205,13 @@ def setUp(self): self.nb_adapter = NautobotAdapter(config=self.config) self.nb_adapter.job = Mock() - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_create_nothing_gets_created(self, mock_tag_involved_objects, mock_validate_dns_name): + def test_ip_address_create_nothing_gets_created(self, mock_tag_involved_objects): """Validate nothing gets created if user selects DONT_CREATE_RECORD for DNS and Fixed Address options.""" - nb_ipaddress_atrs = {"dns_name": "server1.local.test.net", "mac_address": "52:1f:83:d4:9a:2e"} + nb_ipaddress_atrs = {"has_fixed_address": True} nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) self.nb_adapter.load() @@ -180,233 +229,19 @@ def test_ip_address_create_nothing_gets_created(self, mock_tag_involved_objects, infoblox_adapter.job = Mock() self.nb_adapter.sync_to(infoblox_adapter) infoblox_adapter.conn.create_fixed_address.assert_not_called() - infoblox_adapter.conn.create_host_record.assert_not_called() - infoblox_adapter.conn.create_a_record.assert_not_called() - infoblox_adapter.conn.create_ptr_record.assert_not_called() - mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_not_called() - - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", - autospec=True, - ) - def test_ip_address_create_a_record(self, mock_tag_involved_objects, mock_validate_dns_name): - """Validate A Record is created.""" - nb_ipaddress_atrs = {"has_a_record": True, "dns_name": "server1.local.test.net"} - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) - self.nb_adapter.load() - with unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True - ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD - self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD - infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) - inf_ds_namespace = infoblox_adapter.namespace( - name="Global", - ext_attrs={}, - ) - infoblox_adapter.add(inf_ds_namespace) - infoblox_adapter.job = Mock() - self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.create_a_record.assert_called_once() - infoblox_adapter.conn.create_a_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" - ) - infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.create_host_record.assert_not_called() - mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_called_once() - mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" - ) - - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) - @unittest.mock.patch( - 
"nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", - autospec=True, - ) - def test_ip_address_create_a_and_ptr_record(self, mock_tag_involved_objects, mock_validate_dns_name): - """Validate A and PTR records are created.""" - nb_ipaddress_atrs = {"has_a_record": True, "has_ptr_record": True, "dns_name": "server1.local.test.net"} - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) - self.nb_adapter.load() - with unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True - ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD - self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD - infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) - inf_ds_namespace = infoblox_adapter.namespace( - name="Global", - ext_attrs={}, - ) - infoblox_adapter.add(inf_ds_namespace) - infoblox_adapter.job = Mock() - job_logger = Mock() - infoblox_adapter.job.logger = job_logger - self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.create_a_record.assert_called_once() - infoblox_adapter.conn.create_a_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" - ) - infoblox_adapter.conn.create_ptr_record.assert_called_once() - infoblox_adapter.conn.create_ptr_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" - ) - infoblox_adapter.conn.create_host_record.assert_not_called() - mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_called_once() - mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" - ) - - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", - autospec=True, - ) - def test_ip_address_create_host_record(self, mock_tag_involved_objects, mock_validate_dns_name): - """Validate Host Record is created.""" - nb_ipaddress_atrs = {"has_host_record": True, "dns_name": "server1.local.test.net"} - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) - self.nb_adapter.load() - with unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True - ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD - self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD - infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) - inf_ds_namespace = infoblox_adapter.namespace( - name="Global", - ext_attrs={}, - ) - infoblox_adapter.add(inf_ds_namespace) - infoblox_adapter.job = Mock() - job_logger = Mock() - infoblox_adapter.job.logger = job_logger - self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.create_a_record.assert_not_called() - infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.create_host_record.assert_called_once() - infoblox_adapter.conn.create_host_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", 
network_view="default" - ) - mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_called_once() - mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" - ) - - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", - autospec=True, - ) - def test_ip_address_create_no_dns_name(self, mock_tag_involved_objects, mock_validate_dns_name): - """Ensure DNS record is not created if DNS name is missing.""" - nb_ipaddress_atrs = {"has_a_record": True, "dns_name": ""} - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) - self.nb_adapter.load() - with unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True - ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD - self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD - infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) - inf_ds_namespace = infoblox_adapter.namespace( - name="Global", - ext_attrs={}, - ) - infoblox_adapter.add(inf_ds_namespace) - infoblox_adapter.job = Mock() - job_logger = Mock() - infoblox_adapter.job.logger = job_logger - self.nb_adapter.sync_to(infoblox_adapter) - log_msg = "Cannot create Infoblox DNS record for IP Address 10.0.0.1. DNS name is not defined." - job_logger.warning.assert_called_with(log_msg) - - mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_not_called() infoblox_adapter.conn.create_a_record.assert_not_called() infoblox_adapter.conn.create_ptr_record.assert_not_called() infoblox_adapter.conn.create_host_record.assert_not_called() - - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=False, - ) - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", - autospec=True, - ) - def test_ip_address_create_invalid_dns_name(self, mock_tag_involved_objects, mock_validate_dns_name): - """Ensure DNS record is not created if DNS name is invalid.""" - nb_ipaddress_atrs = {"has_a_record": True, "dns_name": ".invalid-dns-name"} - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) - self.nb_adapter.load() - with unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True - ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD - self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD - infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) - inf_ds_namespace = infoblox_adapter.namespace( - name="Global", - ext_attrs={}, - ) - infoblox_adapter.add(inf_ds_namespace) - infoblox_adapter.job = Mock() - job_logger = Mock() - infoblox_adapter.job.logger = job_logger - self.nb_adapter.sync_to(infoblox_adapter) - log_msg = "Invalid zone fqdn in DNS name `.invalid-dns-name` for IP Address 10.0.0.1." 
- job_logger.warning.assert_called_with(log_msg) - mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_called_once() - mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name=".invalid-dns-name", network_view="default" - ) - infoblox_adapter.conn.create_a_record.assert_not_called() - infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.create_host_record.assert_not_called() - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_create_fixed_address_reserved(self, mock_tag_involved_objects, mock_validate_dns_name): + def test_ip_address_create_fixed_address_reserved(self, mock_tag_involved_objects): """Validate Fixed Address type RESERVED is created.""" nb_ipaddress_atrs = { - "fixed_address_name": "FixedAddresReserved", + "description": "FixedAddresReserved", "fixed_address_comment": "Fixed Address Reservation", "has_fixed_address": True, } @@ -438,20 +273,15 @@ def test_ip_address_create_fixed_address_reserved(self, mock_tag_involved_object infoblox_adapter.conn.create_ptr_record.assert_not_called() infoblox_adapter.conn.create_host_record.assert_not_called() mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_not_called() - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_create_fixed_address_reserved_no_name(self, mock_tag_involved_objects, mock_validate_dns_name): + def test_ip_address_create_fixed_address_reserved_no_name(self, mock_tag_involved_objects): """Validate Fixed Address type RESERVED is created with empty name.""" nb_ipaddress_atrs = { + "description": "", "has_fixed_address": True, } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) @@ -482,21 +312,15 @@ def test_ip_address_create_fixed_address_reserved_no_name(self, mock_tag_involve infoblox_adapter.conn.create_ptr_record.assert_not_called() infoblox_adapter.conn.create_host_record.assert_not_called() mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_not_called() - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_create_fixed_address_mac(self, mock_tag_involved_objects, mock_validate_dns_name): + def test_ip_address_create_fixed_address_mac(self, mock_tag_involved_objects): """Validate Fixed Address type MAC_ADDRESS is created.""" nb_ipaddress_atrs = { - "fixed_address_name": "FixedAddresReserved", + "description": "FixedAddresReserved", "fixed_address_comment": "Fixed Address Reservation", "has_fixed_address": True, "mac_address": "52:1f:83:d4:9a:2e", @@ -530,20 +354,16 @@ def test_ip_address_create_fixed_address_mac(self, mock_tag_involved_objects, mo infoblox_adapter.conn.create_a_record.assert_not_called() infoblox_adapter.conn.create_ptr_record.assert_not_called() 
mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_not_called() - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_create_fixed_address_mac_no_name(self, mock_tag_involved_objects, mock_validate_dns_name): + def test_ip_address_create_fixed_address_mac_no_name(self, mock_tag_involved_objects): """Validate Fixed Address type MAC is created with empty name.""" nb_ipaddress_atrs = { + "description": "", + "fixed_address_comment": "", "has_fixed_address": True, "mac_address": "52:1f:83:d4:9a:2e", } @@ -576,378 +396,152 @@ def test_ip_address_create_fixed_address_mac_no_name(self, mock_tag_involved_obj infoblox_adapter.conn.create_ptr_record.assert_not_called() infoblox_adapter.conn.create_host_record.assert_not_called() mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_not_called() @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, - return_value=True, ) - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", - autospec=True, - ) - def test_ip_address_create_fixed_address_reserved_with_host_record( - self, mock_tag_involved_objects, mock_validate_dns_name - ): - """Validate Fixed Address type RESERVED is created with DNS Host record.""" + def test_ip_address_update_fixed_address_type_reserved_name_and_comment(self, mock_tag_involved_objects): + """Ensure Fixed Address type RESERVED has name and comment updated.""" nb_ipaddress_atrs = { - "dns_name": "server1.local.test.net", - "fixed_address_name": "FixedAddresReserved", - "fixed_address_comment": "Fixed Address Reservation", "has_fixed_address": True, + "description": "server2.local.test.net", + "fixed_address_comment": "new description", } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) self.nb_adapter.load() - with unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True - ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED - self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD - infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) - inf_ds_namespace = infoblox_adapter.namespace( - name="Global", - ext_attrs={}, - ) - infoblox_adapter.add(inf_ds_namespace) - infoblox_adapter.job = Mock() - self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.create_fixed_address.assert_called_once() - infoblox_adapter.conn.create_fixed_address.assert_called_with( - ip_address="10.0.0.1", - name="FixedAddresReserved", - comment="Fixed Address Reservation", - match_client="RESERVED", - network_view="default", - ) - infoblox_adapter.conn.create_a_record.assert_not_called() - infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.create_host_record.assert_called_once() - infoblox_adapter.conn.create_host_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" - ) - mock_tag_involved_objects.assert_called_once() - 
mock_validate_dns_name.assert_called_once() - mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" - ) - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", - autospec=True, - ) - def test_ip_address_create_fixed_address_reserved_with_a_record( - self, mock_tag_involved_objects, mock_validate_dns_name - ): - """Validate Fixed Address type RESERVED is created with DNS A record.""" - nb_ipaddress_atrs = { - "dns_name": "server1.local.test.net", - "fixed_address_name": "FixedAddresReserved", - "fixed_address_comment": "Fixed Address Reservation", - "has_fixed_address": True, - } - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) - self.nb_adapter.load() with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED - self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( name="Global", ext_attrs={}, ) infoblox_adapter.add(inf_ds_namespace) - infoblox_adapter.job = Mock() + inf_ipaddress_atrs = { + "has_fixed_address": True, + "fixed_address_ref": "fixedaddress/xyz", + "fixed_address_type": "RESERVED", + "fixed_address_name": "server1.local.test.net", + "fixed_address_comment": "old description", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.create_fixed_address.assert_called_once() - infoblox_adapter.conn.create_fixed_address.assert_called_with( - ip_address="10.0.0.1", - name="FixedAddresReserved", - comment="Fixed Address Reservation", - match_client="RESERVED", - network_view="default", - ) - infoblox_adapter.conn.create_host_record.assert_not_called() - infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.create_a_record.assert_called_once() - infoblox_adapter.conn.create_a_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" + infoblox_adapter.conn.update_fixed_address.assert_called_once() + infoblox_adapter.conn.update_fixed_address.assert_called_with( + ref="fixedaddress/xyz", data={"name": "server2.local.test.net", "comment": "new description"} ) + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_called_once() - mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" - ) - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) @unittest.mock.patch( 
"nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_create_fixed_address_reserved_with_a_and_ptr_record( - self, mock_tag_involved_objects, mock_validate_dns_name - ): - """Validate Fixed Address type RESERVED is created with DNS A and PTR records.""" + def test_ip_address_update_fixed_address_type_reserved_name_and_comment_empty(self, mock_tag_involved_objects): + """Ensure Fixed Address type RESERVED has name and comment set to empty string.""" nb_ipaddress_atrs = { - "dns_name": "server1.local.test.net", - "fixed_address_name": "FixedAddresReserved", - "fixed_address_comment": "Fixed Address Reservation", "has_fixed_address": True, + "description": "", + "fixed_address_comment": "", } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) self.nb_adapter.load() + with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED - self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) - inf_ds_namespace = infoblox_adapter.namespace( - name="Global", - ext_attrs={}, - ) - infoblox_adapter.add(inf_ds_namespace) infoblox_adapter.job = Mock() - self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.create_fixed_address.assert_called_once() - infoblox_adapter.conn.create_fixed_address.assert_called_with( - ip_address="10.0.0.1", - name="FixedAddresReserved", - comment="Fixed Address Reservation", - match_client="RESERVED", - network_view="default", - ) - infoblox_adapter.conn.create_host_record.assert_not_called() - infoblox_adapter.conn.create_ptr_record.assert_called_once() - infoblox_adapter.conn.create_ptr_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" - ) - infoblox_adapter.conn.create_a_record.assert_called_once() - infoblox_adapter.conn.create_a_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" - ) - mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_called_once() - mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" - ) - - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", - autospec=True, - ) - def test_ip_address_create_fixed_address_mac_with_host_record( - self, mock_tag_involved_objects, mock_validate_dns_name - ): - """Validate Fixed Address type MAC_ADDRESS is created with DNS Host record.""" - nb_ipaddress_atrs = { - "dns_name": "server1.local.test.net", - "fixed_address_name": "FixedAddresReserved", - "fixed_address_comment": "Fixed Address Reservation", - "has_fixed_address": True, - "mac_address": "52:1f:83:d4:9a:2e", - } - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) - self.nb_adapter.load() - with unittest.mock.patch( - 
"nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True - ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS - self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD - infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) inf_ds_namespace = infoblox_adapter.namespace( name="Global", ext_attrs={}, ) infoblox_adapter.add(inf_ds_namespace) - infoblox_adapter.job = Mock() + inf_ipaddress_atrs = { + "has_fixed_address": True, + "fixed_address_ref": "fixedaddress/xyz", + "fixed_address_type": "RESERVED", + "description": "server1.local.test.net", + "fixed_address_comment": "description", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.create_fixed_address.assert_called_once() - infoblox_adapter.conn.create_fixed_address.assert_called_with( - ip_address="10.0.0.1", - name="FixedAddresReserved", - comment="Fixed Address Reservation", - mac_address="52:1f:83:d4:9a:2e", - match_client="MAC_ADDRESS", - network_view="default", - ) - infoblox_adapter.conn.create_a_record.assert_not_called() - infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.create_host_record.assert_called_once() - infoblox_adapter.conn.create_host_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" + infoblox_adapter.conn.update_fixed_address.assert_called_once() + infoblox_adapter.conn.update_fixed_address.assert_called_with( + ref="fixedaddress/xyz", data={"name": "", "comment": ""} ) + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_called_once() - mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" - ) - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_create_fixed_address_mac_with_a_record(self, mock_tag_involved_objects, mock_validate_dns_name): - """Validate Fixed Address type MAC_ADDRESS is created with DNS A record.""" + def test_ip_address_update_fixed_address_type_mac_update_mac(self, mock_tag_involved_objects): + """Ensure Fixed Address type MAC has MAC address updated.""" nb_ipaddress_atrs = { - "dns_name": "server1.local.test.net", - "fixed_address_name": "FixedAddresReserved", - "fixed_address_comment": "Fixed Address Reservation", "has_fixed_address": True, - "mac_address": "52:1f:83:d4:9a:2e", + "mac_address": "52:1f:83:d4:9a:ab", } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) self.nb_adapter.add(nb_ds_ipaddress) self.nb_adapter.load() - with unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True - ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS - self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD - infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) - 
inf_ds_namespace = infoblox_adapter.namespace( - name="Global", - ext_attrs={}, - ) - infoblox_adapter.add(inf_ds_namespace) - infoblox_adapter.job = Mock() - self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.create_fixed_address.assert_called_once() - infoblox_adapter.conn.create_fixed_address.assert_called_with( - ip_address="10.0.0.1", - name="FixedAddresReserved", - comment="Fixed Address Reservation", - mac_address="52:1f:83:d4:9a:2e", - match_client="MAC_ADDRESS", - network_view="default", - ) - infoblox_adapter.conn.create_host_record.assert_not_called() - infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.create_a_record.assert_called_once() - infoblox_adapter.conn.create_a_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" - ) - mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_called_once() - mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" - ) - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", - autospec=True, - ) - def test_ip_address_create_fixed_address_mac_with_a_and_ptr_record( - self, mock_tag_involved_objects, mock_validate_dns_name - ): - """Validate Fixed Address type MAC_ADDRESS is created with DNS A and PTR records.""" - nb_ipaddress_atrs = { - "dns_name": "server1.local.test.net", - "fixed_address_name": "FixedAddresReserved", - "fixed_address_comment": "Fixed Address Reservation", - "has_fixed_address": True, - "mac_address": "52:1f:83:d4:9a:2e", - } - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) - self.nb_adapter.load() with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS - self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( name="Global", ext_attrs={}, ) infoblox_adapter.add(inf_ds_namespace) - infoblox_adapter.job = Mock() + inf_ipaddress_atrs = { + "has_fixed_address": True, + "fixed_address_ref": "fixedaddress/xyz", + "fixed_address_type": "MAC_ADDRESS", + "mac_address": "52:1f:83:d4:9a:2e", + } + inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) + infoblox_adapter.add(inf_ds_ipaddress) self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.create_fixed_address.assert_called_once() - infoblox_adapter.conn.create_fixed_address.assert_called_with( - ip_address="10.0.0.1", - name="FixedAddresReserved", - comment="Fixed Address Reservation", - mac_address="52:1f:83:d4:9a:2e", - match_client="MAC_ADDRESS", - network_view="default", - ) - infoblox_adapter.conn.create_host_record.assert_not_called() - infoblox_adapter.conn.create_ptr_record.assert_called_once() - infoblox_adapter.conn.create_ptr_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", 
network_view="default" - ) - infoblox_adapter.conn.create_a_record.assert_called_once() - infoblox_adapter.conn.create_a_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" + infoblox_adapter.conn.update_fixed_address.assert_called_once() + infoblox_adapter.conn.update_fixed_address.assert_called_with( + ref="fixedaddress/xyz", data={"mac": "52:1f:83:d4:9a:ab"} ) + infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.update_ptr_record.assert_not_called() mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_called_once() - mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" - ) - - -class TestModelInfobloxIPAddressUpdate(TestCase): - """Tests validating IP Address Update scenarios.""" - def setUp(self): - "Test class set up." - self.config = create_default_infoblox_config() - self.nb_adapter = NautobotAdapter(config=self.config) - self.nb_adapter.job = Mock() - - ############ - # TEST Fixed Address record updates - ########### - - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_fixed_address_type_reserved_name_and_comment( - self, mock_tag_involved_objects, mock_validate_dns_name - ): - """Ensure Fixed Address type RESERVED has name and comment updated.""" + def test_ip_address_update_fixed_address_type_mac_name_and_comment(self, mock_tag_involved_objects): + """Ensure Fixed Address type MAC has name and comment updated.""" nb_ipaddress_atrs = { + "description": "server2.local.test.net", "has_fixed_address": True, - "fixed_address_name": "server2.local.test.net", "fixed_address_comment": "new description", } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) @@ -957,7 +551,7 @@ def test_ip_address_update_fixed_address_type_reserved_name_and_comment( with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) infoblox_adapter.job = Mock() @@ -967,11 +561,11 @@ def test_ip_address_update_fixed_address_type_reserved_name_and_comment( ) infoblox_adapter.add(inf_ds_namespace) inf_ipaddress_atrs = { + "description": "server1.local.test.net", "has_fixed_address": True, "fixed_address_ref": "fixedaddress/xyz", - "fixed_address_type": "RESERVED", - "fixed_address_name": "server1.local.test.net", - "fixed_address_comment": "description", + "fixed_address_type": "MAC_ADDRESS", + "fixed_address_comment": "old description", } inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) infoblox_adapter.add(inf_ds_ipaddress) @@ -980,31 +574,20 @@ def test_ip_address_update_fixed_address_type_reserved_name_and_comment( infoblox_adapter.conn.update_fixed_address.assert_called_with( ref="fixedaddress/xyz", data={"name": "server2.local.test.net", "comment": "new 
description"} ) - infoblox_adapter.conn.create_host_record.assert_not_called() infoblox_adapter.conn.update_host_record.assert_not_called() - infoblox_adapter.conn.create_a_record.assert_not_called() infoblox_adapter.conn.update_a_record.assert_not_called() - infoblox_adapter.conn.create_ptr_record.assert_not_called() infoblox_adapter.conn.update_ptr_record.assert_not_called() mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_not_called() - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_fixed_address_type_reserved_name_and_comment_empty( - self, mock_tag_involved_objects, mock_validate_dns_name - ): - """Ensure Fixed Address type RESERVED has name and comment set to empty string.""" + def test_ip_address_update_fixed_address_type_mac_name_and_comment_empty(self, mock_tag_involved_objects): + """Ensure Fixed Address type MAC has name and comment set to empty string.""" nb_ipaddress_atrs = { "has_fixed_address": True, - "fixed_address_name": "", + "description": "", "fixed_address_comment": "", } nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) @@ -1014,7 +597,7 @@ def test_ip_address_update_fixed_address_type_reserved_name_and_comment_empty( with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) infoblox_adapter.job = Mock() @@ -1026,8 +609,8 @@ def test_ip_address_update_fixed_address_type_reserved_name_and_comment_empty( inf_ipaddress_atrs = { "has_fixed_address": True, "fixed_address_ref": "fixedaddress/xyz", - "fixed_address_type": "RESERVED", - "fixed_address_name": "server1.local.test.net", + "fixed_address_type": "MAC_ADDRESS", + "description": "server1.local.test.net", "fixed_address_comment": "description", } inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) @@ -1037,42 +620,24 @@ def test_ip_address_update_fixed_address_type_reserved_name_and_comment_empty( infoblox_adapter.conn.update_fixed_address.assert_called_with( ref="fixedaddress/xyz", data={"name": "", "comment": ""} ) - infoblox_adapter.conn.create_host_record.assert_not_called() infoblox_adapter.conn.update_host_record.assert_not_called() - infoblox_adapter.conn.create_a_record.assert_not_called() infoblox_adapter.conn.update_a_record.assert_not_called() - infoblox_adapter.conn.create_ptr_record.assert_not_called() infoblox_adapter.conn.update_ptr_record.assert_not_called() mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_not_called() - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_fixed_address_type_mac_update_mac( - self, mock_tag_involved_objects, mock_validate_dns_name - ): - """Ensure Fixed Address type MAC 
has MAC address updated.""" - nb_ipaddress_atrs = { - "dns_name": "server1.local.test.net", - "has_fixed_address": True, - "mac_address": "52:1f:83:d4:9a:ab", - } - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) + def test_ip_address_delete_fail(self, mock_tag_involved_objects): + """Ensure Fixed Address is not deleted if object deletion is not enabled in the config.""" self.nb_adapter.load() with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS - self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD + self.config.infoblox_deletable_models = [] infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( @@ -1081,55 +646,31 @@ def test_ip_address_update_fixed_address_type_mac_update_mac( ) infoblox_adapter.add(inf_ds_namespace) inf_ipaddress_atrs = { - "dns_name": "server1.local.test.net", "has_fixed_address": True, "fixed_address_ref": "fixedaddress/xyz", - "fixed_address_type": "MAC_ADDRESS", - "mac_address": "52:1f:83:d4:9a:2e", + "fixed_address_type": "RESERVED", + "description": "server1.local.test.net", + "fixed_address_comment": "description", } inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) infoblox_adapter.add(inf_ds_ipaddress) self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.update_fixed_address.assert_called_once() - infoblox_adapter.conn.update_fixed_address.assert_called_with( - ref="fixedaddress/xyz", data={"mac": "52:1f:83:d4:9a:ab"} - ) - infoblox_adapter.conn.create_host_record.assert_not_called() - infoblox_adapter.conn.update_host_record.assert_not_called() - infoblox_adapter.conn.create_a_record.assert_not_called() - infoblox_adapter.conn.update_a_record.assert_not_called() - infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.update_ptr_record.assert_not_called() + infoblox_adapter.conn.delete_fixed_address_record_by_ref.assert_not_called() mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_not_called() - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_fixed_address_type_mac_name_and_comment( - self, mock_tag_involved_objects, mock_validate_dns_name - ): - """Ensure Fixed Address type MAC has name and comment updated.""" - nb_ipaddress_atrs = { - "fixed_address_name": "server2.local.test.net", - "has_fixed_address": True, - "fixed_address_comment": "new description", - } - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) + def test_ip_address_delete_success(self, mock_tag_involved_objects): + """Ensure Fixed Address is deleted if object deletion is enabled in the config.""" self.nb_adapter.load() with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS - self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD + self.config.infoblox_deletable_models = 
[InfobloxDeletableModelChoices.FIXED_ADDRESS] infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( @@ -1138,27 +679,28 @@ def test_ip_address_update_fixed_address_type_mac_name_and_comment( ) infoblox_adapter.add(inf_ds_namespace) inf_ipaddress_atrs = { - "fixed_address_name": "server1.local.test.net", "has_fixed_address": True, "fixed_address_ref": "fixedaddress/xyz", - "fixed_address_type": "MAC_ADDRESS", - "fixed_address_comment": "old description", + "fixed_address_type": "RESERVED", + "description": "server1.local.test.net", + "fixed_address_comment": "description", } inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) infoblox_adapter.add(inf_ds_ipaddress) self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.update_fixed_address.assert_called_once() - infoblox_adapter.conn.update_fixed_address.assert_called_with( - ref="fixedaddress/xyz", data={"name": "server2.local.test.net", "comment": "new description"} - ) - infoblox_adapter.conn.create_host_record.assert_not_called() - infoblox_adapter.conn.update_host_record.assert_not_called() - infoblox_adapter.conn.create_a_record.assert_not_called() - infoblox_adapter.conn.update_a_record.assert_not_called() - infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.update_ptr_record.assert_not_called() + infoblox_adapter.conn.delete_fixed_address_record_by_ref.assert_called_once() + infoblox_adapter.conn.delete_fixed_address_record_by_ref.assert_called_with(ref="fixedaddress/xyz") mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_not_called() + + +class TestModelInfobloxDnsARecord(TestCase): + """Tests DNS A model operations.""" + + def setUp(self): + "Test class set up." 
+        self.config = create_default_infoblox_config()
+        self.nb_adapter = NautobotAdapter(config=self.config)
+        self.nb_adapter.job = Mock()
     @unittest.mock.patch(
         "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name",
         autospec=True,
@@ -1169,58 +711,32 @@ def test_ip_address_update_fixed_address_type_mac_name_and_comment(
         "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects",
         autospec=True,
     )
-    def test_ip_address_update_fixed_address_type_mac_name_and_comment_empty(
-        self, mock_tag_involved_objects, mock_validate_dns_name
-    ):
-        """Ensure Fixed Address type MAC has name and comment set to empty string."""
-        nb_ipaddress_atrs = {
-            "has_fixed_address": True,
-            "fixed_address_name": "",
-            "fixed_address_comment": "",
-        }
-        nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs))
-        self.nb_adapter.add(nb_ds_ipaddress)
+    def test_a_record_create_nothing_gets_created(self, mock_tag_involved_objects, mock_validate_dns_name):
+        """Validate nothing gets created if user selects DONT_CREATE_RECORD for DNS and Fixed Address options."""
+        nb_dnsarecord_atrs = {"has_fixed_address": True}
+        nb_ds_arecord = self.nb_adapter.dnsarecord(**_get_dns_a_record_dict(nb_dnsarecord_atrs))
+        self.nb_adapter.add(nb_ds_arecord)
         self.nb_adapter.load()
-
         with unittest.mock.patch(
            "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True
        ) as mock_client:
-            self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS
+            self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD
             self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD
             infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config)
-            infoblox_adapter.job = Mock()
             inf_ds_namespace = infoblox_adapter.namespace(
                 name="Global",
                 ext_attrs={},
             )
             infoblox_adapter.add(inf_ds_namespace)
-            inf_ipaddress_atrs = {
-                "has_fixed_address": True,
-                "fixed_address_ref": "fixedaddress/xyz",
-                "fixed_address_type": "MAC_ADDRESS",
-                "fixed_address_name": "server1.local.test.net",
-                "fixed_address_comment": "description",
-            }
-            inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs))
-            infoblox_adapter.add(inf_ds_ipaddress)
+            infoblox_adapter.job = Mock()
             self.nb_adapter.sync_to(infoblox_adapter)
-            infoblox_adapter.conn.update_fixed_address.assert_called_once()
-            infoblox_adapter.conn.update_fixed_address.assert_called_with(
-                ref="fixedaddress/xyz", data={"name": "", "comment": ""}
-            )
-            infoblox_adapter.conn.create_host_record.assert_not_called()
-            infoblox_adapter.conn.update_host_record.assert_not_called()
+            infoblox_adapter.conn.create_fixed_address.assert_not_called()
             infoblox_adapter.conn.create_a_record.assert_not_called()
-            infoblox_adapter.conn.update_a_record.assert_not_called()
             infoblox_adapter.conn.create_ptr_record.assert_not_called()
-            infoblox_adapter.conn.update_ptr_record.assert_not_called()
+            infoblox_adapter.conn.create_host_record.assert_not_called()
             mock_tag_involved_objects.assert_called_once()
             mock_validate_dns_name.assert_not_called()
-    ###########################
-    # DNS Record Update tests
-    ###########################
     @unittest.mock.patch(
         "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name",
         autospec=True,
@@ -1230,49 +746,35 @@ def test_ip_address_update_fixed_address_type_mac_name_and_comment_empty(
         "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects",
         autospec=True,
     )
-    def 
test_ip_address_update_host_record(self, mock_tag_involved_objects, mock_validate_dns_name): - """Ensure only Host record is updated.""" - nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_host_record": True, "has_fixed_address": False} - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) + def test_a_record_create(self, mock_tag_involved_objects, mock_validate_dns_name): + """Validate A Record is created.""" + nb_dnsarecord_atrs = {} + nb_ds_arecord = self.nb_adapter.dnsarecord(**_get_dns_a_record_dict(nb_dnsarecord_atrs)) + self.nb_adapter.add(nb_ds_arecord) self.nb_adapter.load() - with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD - self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) - infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( name="Global", ext_attrs={}, ) infoblox_adapter.add(inf_ds_namespace) - inf_ipaddress_atrs = { - "dns_name": "server1.local.test.net", - "has_host_record": True, - "host_record_ref": "record:host/xyz", - "fixed_address_ref": "fixedaddress/xyz", - "fixed_address_type": "MAC_ADDRESS", - } - inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) - infoblox_adapter.add(inf_ds_ipaddress) + infoblox_adapter.job = Mock() self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.update_host_record.assert_called_once() - infoblox_adapter.conn.update_host_record.assert_called_with( - ref="record:host/xyz", data={"name": "server2.local.test.net"} + infoblox_adapter.conn.create_a_record.assert_called_once() + infoblox_adapter.conn.create_a_record.assert_called_with( + fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test A Record", network_view="default" ) - infoblox_adapter.conn.create_host_record.assert_not_called() - infoblox_adapter.conn.create_a_record.assert_not_called() - infoblox_adapter.conn.update_a_record.assert_not_called() infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.update_ptr_record.assert_not_called() - infoblox_adapter.conn.update_fixed_address.assert_not_called() + infoblox_adapter.conn.create_host_record.assert_not_called() mock_tag_involved_objects.assert_called_once() mock_validate_dns_name.assert_called_once() mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" ) @unittest.mock.patch( @@ -1284,49 +786,77 @@ def test_ip_address_update_host_record(self, mock_tag_involved_objects, mock_val "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_create_host_record(self, mock_tag_involved_objects, mock_validate_dns_name): - """Ensure Host record is created during update if one doesn't exist. 
This can happen if fixed address currently exist and config was updated to enable host record creation.""" - nb_ipaddress_atrs = {"dns_name": "server1.local.test.net", "has_host_record": True, "has_fixed_address": False} - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) + def test_a_record_create_no_dns_name(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure DNS A record is not created if DNS name is missing.""" + nb_arecord_atrs = {"dns_name": ""} + nb_ds_arecord = self.nb_adapter.dnsarecord(**_get_dns_a_record_dict(nb_arecord_atrs)) + self.nb_adapter.add(nb_ds_arecord) self.nb_adapter.load() - with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED - self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) - infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( name="Global", ext_attrs={}, ) infoblox_adapter.add(inf_ds_namespace) - inf_ipaddress_atrs = { - "dns_name": "server1.local.test.net", - "has_host_record": False, - "fixed_address_ref": "fixedaddress/xyz", - "fixed_address_type": "MAC_ADDRESS", - } - inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) - infoblox_adapter.add(inf_ds_ipaddress) + infoblox_adapter.job = Mock() + job_logger = Mock() + infoblox_adapter.job.logger = job_logger self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.create_host_record.assert_called_once() - infoblox_adapter.conn.create_host_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" - ) - infoblox_adapter.conn.update_host_record.assert_not_called() + log_msg = "Cannot create Infoblox DNS A record for IP Address 10.0.0.1. DNS name is not defined." 
+ job_logger.warning.assert_called_with(log_msg) + + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_not_called() infoblox_adapter.conn.create_a_record.assert_not_called() - infoblox_adapter.conn.update_a_record.assert_not_called() infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.update_ptr_record.assert_not_called() - infoblox_adapter.conn.update_fixed_address.assert_not_called() + infoblox_adapter.conn.create_host_record.assert_not_called() + + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", + autospec=True, + return_value=False, + ) + @unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_a_record_create_invalid_dns_name(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure DNS A record is not created if DNS name is invalid.""" + nb_arecord_atrs = {"dns_name": ".invalid-dns-name"} + nb_ds_arecord = self.nb_adapter.dnsarecord(**_get_dns_a_record_dict(nb_arecord_atrs)) + self.nb_adapter.add(nb_ds_arecord) + self.nb_adapter.load() + with unittest.mock.patch( + "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True + ) as mock_client: + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD + infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) + inf_ds_namespace = infoblox_adapter.namespace( + name="Global", + ext_attrs={}, + ) + infoblox_adapter.add(inf_ds_namespace) + infoblox_adapter.job = Mock() + job_logger = Mock() + infoblox_adapter.job.logger = job_logger + self.nb_adapter.sync_to(infoblox_adapter) + log_msg = "Invalid zone fqdn in DNS name `.invalid-dns-name` for IP Address 10.0.0.1." 
+ job_logger.warning.assert_called_with(log_msg) + mock_tag_involved_objects.assert_called_once() mock_validate_dns_name.assert_called_once() mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" + infoblox_client=mock_client, dns_name=".invalid-dns-name", network_view="default" ) + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.create_host_record.assert_not_called() @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", @@ -1337,13 +867,12 @@ def test_ip_address_update_create_host_record(self, mock_tag_involved_objects, m "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_a_record(self, mock_tag_involved_objects, mock_validate_dns_name): + def test_a_record_update(self, mock_tag_involved_objects, mock_validate_dns_name): """Ensure only A record is updated.""" - nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_a_record": True, "has_fixed_address": False} - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) + nb_arecord_atrs = {"dns_name": "server2.local.test.net"} + nb_ds_arecord = self.nb_adapter.dnsarecord(**_get_dns_a_record_dict(nb_arecord_atrs)) + self.nb_adapter.add(nb_ds_arecord) self.nb_adapter.load() - with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: @@ -1356,15 +885,12 @@ def test_ip_address_update_a_record(self, mock_tag_involved_objects, mock_valida ext_attrs={}, ) infoblox_adapter.add(inf_ds_namespace) - inf_ipaddress_atrs = { + inf_arecord_atrs = { "dns_name": "server1.local.test.net", - "has_a_record": True, - "a_record_ref": "record:a/xyz", - "fixed_address_ref": "fixedaddress/xyz", - "fixed_address_type": "MAC_ADDRESS", + "ref": "record:a/xyz", } - inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) - infoblox_adapter.add(inf_ds_ipaddress) + inf_ds_arecord = infoblox_adapter.dnsarecord(**_get_dns_a_record_dict(inf_arecord_atrs)) + infoblox_adapter.add(inf_ds_arecord) self.nb_adapter.sync_to(infoblox_adapter) infoblox_adapter.conn.update_a_record.assert_called_once() infoblox_adapter.conn.update_a_record.assert_called_with( @@ -1385,82 +911,63 @@ def test_ip_address_update_a_record(self, mock_tag_involved_objects, mock_valida @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", autospec=True, - return_value=True, + return_value=False, ) @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_create_a_record(self, mock_tag_involved_objects, mock_validate_dns_name): - """Ensure A record is created during update if one doesn't exist. 
This can happen if fixed address currently exist and config was updated to enable A record creation.""" - nb_ipaddress_atrs = {"dns_name": "server1.local.test.net", "has_a_record": True, "has_fixed_address": False} - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) + def test_a_record_update_invalid_dns_name(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure DNS A record is not updated if DNS name is invalid.""" + nb_arecord_atrs = {"dns_name": ".invalid-dns-name"} + nb_ds_arecord = self.nb_adapter.dnsarecord(**_get_dns_a_record_dict(nb_arecord_atrs)) + self.nb_adapter.add(nb_ds_arecord) self.nb_adapter.load() - with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) - infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( name="Global", ext_attrs={}, ) infoblox_adapter.add(inf_ds_namespace) - inf_ipaddress_atrs = { + inf_arecord_atrs = { "dns_name": "server1.local.test.net", - "has_a_record": False, - "fixed_address_ref": "fixedaddress/xyz", - "fixed_address_type": "MAC_ADDRESS", + "ref": "record:a/xyz", } - inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) - infoblox_adapter.add(inf_ds_ipaddress) + inf_ds_arecord = infoblox_adapter.dnsarecord(**_get_dns_a_record_dict(inf_arecord_atrs)) + infoblox_adapter.add(inf_ds_arecord) + infoblox_adapter.job = Mock() + job_logger = Mock() + infoblox_adapter.job.logger = job_logger self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.create_a_record.assert_called_once() - infoblox_adapter.conn.create_a_record.assert_called_with( - fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" - ) - infoblox_adapter.conn.create_host_record.assert_not_called() - infoblox_adapter.conn.update_host_record.assert_not_called() - infoblox_adapter.conn.update_a_record.assert_not_called() - infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.update_ptr_record.assert_not_called() - infoblox_adapter.conn.update_fixed_address.assert_not_called() + log_msg = "Invalid zone fqdn in DNS name `.invalid-dns-name` for IP Address 10.0.0.1." 
+ job_logger.warning.assert_called_with(log_msg) + mock_tag_involved_objects.assert_called_once() mock_validate_dns_name.assert_called_once() mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" + infoblox_client=mock_client, dns_name=".invalid-dns-name", network_view="default" ) + infoblox_adapter.conn.update_a_record.assert_not_called() - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_create_ptr_record(self, mock_tag_involved_objects, mock_validate_dns_name): - """Ensure PTR record is created if one doesn't currently exist.""" - nb_ipaddress_atrs = { - "dns_name": "server2.local.test.net", - "has_a_record": True, - "has_ptr_record": True, - "has_fixed_address": False, - } - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) + def test_a_record_delete_fail(self, mock_tag_involved_objects): + """Ensure DNS A record is not deleted if object deletion is not enabled in the config.""" self.nb_adapter.load() with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD - self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD + self.config.infoblox_deletable_models = [] infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( @@ -1468,60 +975,30 @@ def test_ip_address_update_create_ptr_record(self, mock_tag_involved_objects, mo ext_attrs={}, ) infoblox_adapter.add(inf_ds_namespace) - inf_ipaddress_atrs = { - "dns_name": "server2.local.test.net", - "has_a_record": True, - "has_ptr_record": False, - "a_record_ref": "record:a/xyz", - "fixed_address_ref": "fixedaddress/xyz", - "fixed_address_type": "MAC_ADDRESS", + inf_arecord_atrs = { + "dns_name": "server1.local.test.net", + "ref": "record:a/xyz", } - inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) - infoblox_adapter.add(inf_ds_ipaddress) + inf_ds_arecord = infoblox_adapter.dnsarecord(**_get_dns_a_record_dict(inf_arecord_atrs)) + infoblox_adapter.add(inf_ds_arecord) self.nb_adapter.sync_to(infoblox_adapter) - - infoblox_adapter.conn.create_ptr_record.assert_called_once() - infoblox_adapter.conn.create_ptr_record.assert_called_with( - fqdn="server2.local.test.net", ip_address="10.0.0.1", comment="Test IPAddress", network_view="default" - ) - infoblox_adapter.conn.create_host_record.assert_not_called() - infoblox_adapter.conn.update_host_record.assert_not_called() - infoblox_adapter.conn.create_a_record.assert_not_called() - infoblox_adapter.conn.update_a_record.assert_not_called() - infoblox_adapter.conn.update_ptr_record.assert_not_called() - infoblox_adapter.conn.update_fixed_address.assert_not_called() + infoblox_adapter.conn.delete_a_record_by_ref.assert_not_called() mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_called_once() - mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" 
-            )

-    @unittest.mock.patch(
-        "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name",
-        autospec=True,
-        return_value=True,
-    )
     @unittest.mock.patch(
         "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects",
         autospec=True,
     )
-    def test_ip_address_update_a_and_ptr_record(self, mock_tag_involved_objects, mock_validate_dns_name):
-        """Ensure A and PTR records are updated."""
-        nb_ipaddress_atrs = {
-            "dns_name": "server2.local.test.net",
-            "has_a_record": True,
-            "has_ptr_record": True,
-            "has_fixed_address": False,
-        }
-        nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs))
-        self.nb_adapter.add(nb_ds_ipaddress)
+    def test_a_record_delete_success(self, mock_tag_involved_objects):
+        """Ensure DNS A record is deleted if object deletion is enabled in the config."""
         self.nb_adapter.load()
         with unittest.mock.patch(
             "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True
         ) as mock_client:
             self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD
-            self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD
+            self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD
+            self.config.infoblox_deletable_models = [InfobloxDeletableModelChoices.DNS_A_RECORD]
             infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config)
             infoblox_adapter.job = Mock()
             inf_ds_namespace = infoblox_adapter.namespace(
@@ -1529,36 +1006,26 @@ def test_ip_address_update_a_and_ptr_record(self, mock_tag_involved_objects, moc
                 ext_attrs={},
             )
             infoblox_adapter.add(inf_ds_namespace)
-            inf_ipaddress_atrs = {
+            inf_arecord_atrs = {
                 "dns_name": "server1.local.test.net",
-                "has_a_record": True,
-                "has_ptr_record": True,
-                "a_record_ref": "record:a/xyz",
-                "ptr_record_ref": "record:ptr/xyz",
-                "fixed_address_ref": "fixedaddress/xyz",
-                "fixed_address_type": "MAC_ADDRESS",
+                "ref": "record:a/xyz",
             }
-            inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs))
-            infoblox_adapter.add(inf_ds_ipaddress)
+            inf_ds_arecord = infoblox_adapter.dnsarecord(**_get_dns_a_record_dict(inf_arecord_atrs))
+            infoblox_adapter.add(inf_ds_arecord)
             self.nb_adapter.sync_to(infoblox_adapter)
-            infoblox_adapter.conn.update_ptr_record.assert_called_once()
-            infoblox_adapter.conn.update_ptr_record.assert_called_with(
-                ref="record:ptr/xyz", data={"ptrdname": "server2.local.test.net"}
-            )
-            infoblox_adapter.conn.update_a_record.assert_called_once()
-            infoblox_adapter.conn.update_a_record.assert_called_with(
-                ref="record:a/xyz", data={"name": "server2.local.test.net"}
-            )
-            infoblox_adapter.conn.create_host_record.assert_not_called()
-            infoblox_adapter.conn.update_host_record.assert_not_called()
-            infoblox_adapter.conn.create_a_record.assert_not_called()
-            infoblox_adapter.conn.create_ptr_record.assert_not_called()
-            infoblox_adapter.conn.update_fixed_address.assert_not_called()
+            infoblox_adapter.conn.delete_a_record_by_ref.assert_called_once()
+            infoblox_adapter.conn.delete_a_record_by_ref.assert_called_with(ref="record:a/xyz")
             mock_tag_involved_objects.assert_called_once()
-            mock_validate_dns_name.assert_called_once()
-            mock_validate_dns_name.assert_called_with(
-                infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default"
-            )
+
+
+class TestModelInfobloxDnsHostRecord(TestCase):
+    """Tests DNS Host model operations."""
+
+    def setUp(self):
+        """Test class set up."""
+ self.config = create_default_infoblox_config() + self.nb_adapter = NautobotAdapter(config=self.config) + self.nb_adapter.job = Mock() @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", @@ -1569,50 +1036,31 @@ def test_ip_address_update_a_and_ptr_record(self, mock_tag_involved_objects, moc "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_fail_a_and_host_record(self, mock_tag_involved_objects, mock_validate_dns_name): - """Ensure update fails if an A record is marked for update but Infoblox already has a Host record.""" - nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_a_record": True} - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) + def test_host_record_create_nothing_gets_created(self, mock_tag_involved_objects, mock_validate_dns_name): + """Validate nothing gets created if user selects DONT_CREATE_RECORD for DNS and Fixed Address options.""" + nb_dnshostrecord_atrs = {"has_fixed_address": "True"} + nb_ds_hostrecord = self.nb_adapter.dnshostrecord(**_get_dns_host_record_dict(nb_dnshostrecord_atrs)) + self.nb_adapter.add(nb_ds_hostrecord) self.nb_adapter.load() - with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD - self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) - infoblox_adapter.job = Mock() - job_logger = Mock() - infoblox_adapter.job.logger = job_logger inf_ds_namespace = infoblox_adapter.namespace( name="Global", ext_attrs={}, ) infoblox_adapter.add(inf_ds_namespace) - inf_ipaddress_atrs = { - "dns_name": "server1.local.test.net", - "has_host_record": True, - "host_record_ref": "record:host/xyz", - } - inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) - infoblox_adapter.add(inf_ds_ipaddress) + infoblox_adapter.job = Mock() self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.create_host_record.assert_not_called() - infoblox_adapter.conn.update_host_record.assert_not_called() + infoblox_adapter.conn.create_fixed_address.assert_not_called() infoblox_adapter.conn.create_a_record.assert_not_called() - infoblox_adapter.conn.update_a_record.assert_not_called() infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.update_ptr_record.assert_not_called() - infoblox_adapter.conn.update_fixed_address.assert_not_called() - - log_msg = "Cannot update A Record for IP Address, 10.0.0.1. It already has an existing Host Record." 
- job_logger.warning.assert_called_with(log_msg) + infoblox_adapter.conn.create_host_record.assert_not_called() mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_called_once() - mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" - ) + mock_validate_dns_name.assert_not_called() @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", @@ -1623,52 +1071,37 @@ def test_ip_address_update_fail_a_and_host_record(self, mock_tag_involved_object "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_fail_ptr_and_host_record(self, mock_tag_involved_objects, mock_validate_dns_name): - """Ensure update fails if PTR record is marked for update but Infoblox already has a Host record.""" - nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_ptr_record": True} - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) + def test_host_record_create(self, mock_tag_involved_objects, mock_validate_dns_name): + """Validate Host Record is created.""" + nb_dnshostrecord_atrs = {"has_fixed_address": "True"} + nb_ds_hostrecord = self.nb_adapter.dnshostrecord(**_get_dns_host_record_dict(nb_dnshostrecord_atrs)) + self.nb_adapter.add(nb_ds_hostrecord) self.nb_adapter.load() - with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD - self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) - infoblox_adapter.job = Mock() - job_logger = Mock() - infoblox_adapter.job.logger = job_logger inf_ds_namespace = infoblox_adapter.namespace( name="Global", ext_attrs={}, ) infoblox_adapter.add(inf_ds_namespace) - inf_ipaddress_atrs = { - "dns_name": "server1.local.test.net", - "has_host_record": True, - "host_record_ref": "record:host/xyz", - } - inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) - infoblox_adapter.add(inf_ds_ipaddress) + infoblox_adapter.job = Mock() + job_logger = Mock() + infoblox_adapter.job.logger = job_logger self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.create_host_record.assert_not_called() - infoblox_adapter.conn.update_host_record.assert_not_called() infoblox_adapter.conn.create_a_record.assert_not_called() - infoblox_adapter.conn.update_a_record.assert_not_called() infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.update_ptr_record.assert_not_called() - infoblox_adapter.conn.update_fixed_address.assert_not_called() - - log_msg = ( - "Cannot create/update PTR Record for IP Address, 10.0.0.1. It already has an existing Host Record." 
+ infoblox_adapter.conn.create_host_record.assert_called_once() + infoblox_adapter.conn.create_host_record.assert_called_with( + fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test Host Record", network_view="default" ) - job_logger.warning.assert_called_with(log_msg) - mock_tag_involved_objects.assert_called_once() mock_validate_dns_name.assert_called_once() mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" ) @unittest.mock.patch( @@ -1680,107 +1113,77 @@ def test_ip_address_update_fail_ptr_and_host_record(self, mock_tag_involved_obje "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_fail_host_and_a_record(self, mock_tag_involved_objects, mock_validate_dns_name): - """Ensure update fails if Host record is marked for update but Infoblox already has an A record.""" - nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_host_record": True} - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) + def test_host_record_create_no_dns_name(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure DNS Host record is not created if DNS name is missing.""" + nb_dnshostrecord_atrs = {"dns_name": ""} + nb_ds_hostrecord = self.nb_adapter.dnshostrecord(**_get_dns_host_record_dict(nb_dnshostrecord_atrs)) + self.nb_adapter.add(nb_ds_hostrecord) self.nb_adapter.load() - with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) - infoblox_adapter.job = Mock() - job_logger = Mock() - infoblox_adapter.job.logger = job_logger inf_ds_namespace = infoblox_adapter.namespace( name="Global", ext_attrs={}, ) infoblox_adapter.add(inf_ds_namespace) - inf_ipaddress_atrs = { - "dns_name": "server1.local.test.net", - "has_a_record": True, - "a_record_ref": "record:a/xyz", - } - inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) - infoblox_adapter.add(inf_ds_ipaddress) + infoblox_adapter.job = Mock() + job_logger = Mock() + infoblox_adapter.job.logger = job_logger self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.create_host_record.assert_not_called() - infoblox_adapter.conn.update_host_record.assert_not_called() - infoblox_adapter.conn.create_a_record.assert_not_called() - infoblox_adapter.conn.update_a_record.assert_not_called() - infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.update_ptr_record.assert_not_called() - infoblox_adapter.conn.update_fixed_address.assert_not_called() - - log_msg = "Cannot update Host Record for IP Address, 10.0.0.1. It already has an existing A Record." + log_msg = "Cannot create Infoblox DNS Host record for IP Address 10.0.0.1. DNS name is not defined." 
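The missing-name warning built above, and asserted on the next line, implies a guard at the top of the Host record create path. A minimal sketch of such a guard, under assumed names, since the model code itself is not part of this diff:

    def dns_name_guard(adapter, address, dns_name):
        """Return True when a DNS Host record create call may proceed; sketch only."""
        if not dns_name:
            # Mirrors the warning these tests assert; the Infoblox API call is skipped.
            adapter.job.logger.warning(
                f"Cannot create Infoblox DNS Host record for IP Address {address}. DNS name is not defined."
            )
            return False
        return True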
job_logger.warning.assert_called_with(log_msg) mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_called_once() - mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" - ) + mock_validate_dns_name.assert_not_called() + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.create_host_record.assert_not_called() @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", autospec=True, - return_value=True, + return_value=False, ) @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_fail_host_and_ptr_record(self, mock_tag_involved_objects, mock_validate_dns_name): - """Ensure update fails if Host record is marked for update but Infoblox already has a PTR record.""" - nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_host_record": True} - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) + def test_host_record_create_invalid_dns_name(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure DNS Host record is not created if DNS name is invalid.""" + nb_dnshostrecord_atrs = {"dns_name": ".invalid-dns-name"} + nb_ds_hostrecord = self.nb_adapter.dnshostrecord(**_get_dns_host_record_dict(nb_dnshostrecord_atrs)) + self.nb_adapter.add(nb_ds_hostrecord) self.nb_adapter.load() - with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) - infoblox_adapter.job = Mock() - job_logger = Mock() - infoblox_adapter.job.logger = job_logger inf_ds_namespace = infoblox_adapter.namespace( name="Global", ext_attrs={}, ) infoblox_adapter.add(inf_ds_namespace) - inf_ipaddress_atrs = { - "dns_name": "server1.local.test.net", - "has_ptr_record": True, - "ptr_record_ref": "record:ptr/xyz", - } - inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) - infoblox_adapter.add(inf_ds_ipaddress) + infoblox_adapter.job = Mock() + job_logger = Mock() + infoblox_adapter.job.logger = job_logger self.nb_adapter.sync_to(infoblox_adapter) - infoblox_adapter.conn.create_host_record.assert_not_called() - infoblox_adapter.conn.update_host_record.assert_not_called() - infoblox_adapter.conn.create_a_record.assert_not_called() - infoblox_adapter.conn.update_a_record.assert_not_called() - infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.update_ptr_record.assert_not_called() - infoblox_adapter.conn.update_fixed_address.assert_not_called() - mock_validate_dns_name.assert_called_once() - - log_msg = "Cannot update Host Record for IP Address, 10.0.0.1. It already has an existing PTR Record." + log_msg = "Invalid zone fqdn in DNS name `.invalid-dns-name` for IP Address 10.0.0.1." 
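In the invalid-name tests, validate_dns_name is patched to return False and the only expected side effect is the warning. A sketch of that second gate, with the validator injected as a parameter so the snippet stays self-contained; the function name and signature here are assumptions, not the repository's actual code:

    def zone_fqdn_guard(adapter, validate_dns_name, client, address, dns_name, network_view="default"):
        """Return True when dns_name sits in a zone the Infoblox client can resolve; sketch only."""
        if not validate_dns_name(infoblox_client=client, dns_name=dns_name, network_view=network_view):
            # Matches the log_msg asserted in these tests.
            adapter.job.logger.warning(f"Invalid zone fqdn in DNS name `{dns_name}` for IP Address {address}.")
            return False
        return True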
job_logger.warning.assert_called_with(log_msg) mock_tag_involved_objects.assert_called_once() mock_validate_dns_name.assert_called_once() mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + infoblox_client=mock_client, dns_name=".invalid-dns-name", network_view="default" ) + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.create_host_record.assert_not_called() @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", @@ -1791,134 +1194,108 @@ def test_ip_address_update_fail_host_and_ptr_record(self, mock_tag_involved_obje "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_no_dns_updates(self, mock_tag_involved_objects, mock_validate_dns_name): - """Ensure DNS update/create is not trigerred if user configures DONT_CREATE_RECORD for dns_record_type.""" - nb_ipaddress_atrs = {"dns_name": "server2.local.test.net", "has_a_record": True, "has_ptr_record": True} - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) + def test_host_record_update(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure only Host record is updated.""" + nb_dnshostrecord_atrs = {"dns_name": "server2.local.test.net"} + nb_ds_hostrecord = self.nb_adapter.dnshostrecord(**_get_dns_host_record_dict(nb_dnshostrecord_atrs)) + self.nb_adapter.add(nb_ds_hostrecord) self.nb_adapter.load() with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD - self.config.dns_record_type = DNSRecordTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) infoblox_adapter.job = Mock() - job_logger = Mock() - infoblox_adapter.job.logger = job_logger inf_ds_namespace = infoblox_adapter.namespace( name="Global", ext_attrs={}, ) infoblox_adapter.add(inf_ds_namespace) - inf_ipaddress_atrs = { + inf_hostrecord_atrs = { "dns_name": "server1.local.test.net", - "has_a_record": True, - "has_ptr_record": True, - "a_record_ref": "record:a/xyz", - "ptr_record_ref": "record:ptr/xyz", + "ref": "record:host/xyz", } - inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) - infoblox_adapter.add(inf_ds_ipaddress) + inf_ds_hostrecord = infoblox_adapter.dnshostrecord(**_get_dns_host_record_dict(inf_hostrecord_atrs)) + infoblox_adapter.add(inf_ds_hostrecord) self.nb_adapter.sync_to(infoblox_adapter) + infoblox_adapter.conn.update_host_record.assert_called_once() + infoblox_adapter.conn.update_host_record.assert_called_with( + ref="record:host/xyz", data={"name": "server2.local.test.net"} + ) infoblox_adapter.conn.create_host_record.assert_not_called() - infoblox_adapter.conn.update_host_record.assert_not_called() infoblox_adapter.conn.create_a_record.assert_not_called() infoblox_adapter.conn.update_a_record.assert_not_called() infoblox_adapter.conn.create_ptr_record.assert_not_called() infoblox_adapter.conn.update_ptr_record.assert_not_called() infoblox_adapter.conn.update_fixed_address.assert_not_called() - mock_tag_involved_objects.assert_called_once() - 
mock_validate_dns_name.assert_not_called() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + ) @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", autospec=True, - return_value=True, + return_value=False, ) @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_create_fixed_address_reserved(self, mock_tag_involved_objects, mock_validate_dns_name): - """Ensure Fixed Address Reserved is created with DNS record in place, no FA in Infoblox, and config asking for Reserved IP creation.""" - nb_ipaddress_atrs = { - "has_a_record": True, - "has_fixed_address": True, - "fixed_address_name": "FixedAddresReserved", - "fixed_address_comment": "Fixed Address Reservation", - } - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) + def test_host_record_update_invalid_dns_name(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure DNS Host record is not updated if DNS name is invalid.""" + nb_dnshostrecord_atrs = {"dns_name": ".invalid-dns-name"} + nb_ds_hostrecord = self.nb_adapter.dnshostrecord(**_get_dns_host_record_dict(nb_dnshostrecord_atrs)) + self.nb_adapter.add(nb_ds_hostrecord) self.nb_adapter.load() - with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED - self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) - infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( name="Global", ext_attrs={}, ) infoblox_adapter.add(inf_ds_namespace) - inf_ipaddress_atrs = { - "has_a_record": True, - "has_fixed_address": False, + inf_hostrecord_atrs = { + "dns_name": "server1.local.test.net", + "ref": "record:host/xyz", } - inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) - infoblox_adapter.add(inf_ds_ipaddress) + inf_ds_hostrecord = infoblox_adapter.dnshostrecord(**_get_dns_host_record_dict(inf_hostrecord_atrs)) + infoblox_adapter.add(inf_ds_hostrecord) + infoblox_adapter.job = Mock() + job_logger = Mock() + infoblox_adapter.job.logger = job_logger self.nb_adapter.sync_to(infoblox_adapter) + log_msg = "Invalid zone fqdn in DNS name `.invalid-dns-name` for IP Address 10.0.0.1." 
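The delete tests further below (the *_delete_fail / *_delete_success pairs) flip a single knob, config.infoblox_deletable_models. The behavior they pin down reduces to a membership check before any conn.delete_*_by_ref(ref=...) call; a sketch, assuming InfobloxDeletableModelChoices is importable from the integration's choices module like the other choice classes in this patch, with the rest illustrative:

    from nautobot_ssot.integrations.infoblox.choices import InfobloxDeletableModelChoices

    def may_delete(config, choice):
        """Gate checked before deleting a record in Infoblox; sketch only."""
        # e.g. choice = InfobloxDeletableModelChoices.DNS_HOST_RECORD
        return choice in config.infoblox_deletable_models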
+ job_logger.warning.assert_called_with(log_msg) - infoblox_adapter.conn.create_fixed_address.assert_called_once() - infoblox_adapter.conn.create_fixed_address.assert_called_with( - ip_address="10.0.0.1", - name="FixedAddresReserved", - comment="Fixed Address Reservation", - match_client="RESERVED", - network_view="default", + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_called_once() + mock_validate_dns_name.assert_called_with( + infoblox_client=mock_client, dns_name=".invalid-dns-name", network_view="default" ) - infoblox_adapter.conn.update_fixed_address.assert_not_called() - infoblox_adapter.conn.create_host_record.assert_not_called() infoblox_adapter.conn.update_host_record.assert_not_called() - infoblox_adapter.conn.create_a_record.assert_not_called() - infoblox_adapter.conn.update_a_record.assert_not_called() - infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.update_ptr_record.assert_not_called() - mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_not_called() - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_create_fixed_address_mac(self, mock_tag_involved_objects, mock_validate_dns_name): - """Ensure Fixed Address MAC is created with DNS record in place, no FA in Infoblox, and config asking for MAC IP creation.""" - nb_ipaddress_atrs = { - "has_a_record": True, - "mac_address": "52:1f:83:d4:9a:2e", - "has_fixed_address": True, - "fixed_address_name": "FixedAddresReserved", - "fixed_address_comment": "Fixed Address Reservation", - } - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) + def test_host_record_delete_fail(self, mock_tag_involved_objects): + """Ensure DNS Host record is not deleted if object deletion is not enabled in the config.""" self.nb_adapter.load() with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS - self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD + self.config.infoblox_deletable_models = [] infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( @@ -1926,67 +1303,30 @@ def test_ip_address_update_create_fixed_address_mac(self, mock_tag_involved_obje ext_attrs={}, ) infoblox_adapter.add(inf_ds_namespace) - inf_ipaddress_atrs = { - "has_a_record": True, - "has_fixed_address": False, + inf_hostrecord_atrs = { + "dns_name": "server1.local.test.net", + "ref": "record:host/xyz", } - inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) - infoblox_adapter.add(inf_ds_ipaddress) + inf_ds_hostrecord = infoblox_adapter.dnshostrecord(**_get_dns_host_record_dict(inf_hostrecord_atrs)) + infoblox_adapter.add(inf_ds_hostrecord) self.nb_adapter.sync_to(infoblox_adapter) - - infoblox_adapter.conn.create_fixed_address.assert_called_once() - infoblox_adapter.conn.create_fixed_address.assert_called_with( - 
ip_address="10.0.0.1", - name="FixedAddresReserved", - mac_address="52:1f:83:d4:9a:2e", - comment="Fixed Address Reservation", - match_client="MAC_ADDRESS", - network_view="default", - ) - infoblox_adapter.conn.update_fixed_address.assert_not_called() - infoblox_adapter.conn.create_host_record.assert_not_called() - infoblox_adapter.conn.update_host_record.assert_not_called() - infoblox_adapter.conn.create_a_record.assert_not_called() - infoblox_adapter.conn.update_a_record.assert_not_called() - infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.update_ptr_record.assert_not_called() + infoblox_adapter.conn.delete_host_record_by_ref.assert_not_called() mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_not_called() - - ############## - # Update Fixed Address and Update/Create DNS Record - ############## - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_fixed_address_reservation_and_host_record( - self, mock_tag_involved_objects, mock_validate_dns_name - ): - """Ensure Fixed Address RESERVED and Host records are updated together.""" - nb_ipaddress_atrs = { - "dns_name": "server2.local.test.net", - "description": "new description", - "has_fixed_address": True, - "has_host_record": True, - "fixed_address_name": "new fa name", - "fixed_address_comment": "new fa comment", - } - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) + def test_host_record_delete_success(self, mock_tag_involved_objects): + """Ensure DNS Host record is deleted if object deletion is enabled in the config.""" self.nb_adapter.load() with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD + self.config.infoblox_deletable_models = [InfobloxDeletableModelChoices.DNS_HOST_RECORD] infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( @@ -1994,39 +1334,26 @@ def test_ip_address_update_fixed_address_reservation_and_host_record( ext_attrs={}, ) infoblox_adapter.add(inf_ds_namespace) - inf_ipaddress_atrs = { + inf_hostrecord_atrs = { "dns_name": "server1.local.test.net", - "has_host_record": True, - "has_fixed_address": True, - "host_record_ref": "record:host/xyz", - "fixed_address_ref": "fixedaddress/xyz", - "fixed_address_type": "RESERVED", - "description": "old description", - "fixed_address_name": "old fa name", - "fixed_address_comment": "old fa comment", + "ref": "record:host/xyz", } - inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) - infoblox_adapter.add(inf_ds_ipaddress) + inf_ds_hostrecord = infoblox_adapter.dnshostrecord(**_get_dns_host_record_dict(inf_hostrecord_atrs)) + infoblox_adapter.add(inf_ds_hostrecord) self.nb_adapter.sync_to(infoblox_adapter) - - infoblox_adapter.conn.update_host_record.assert_called_once() - infoblox_adapter.conn.update_host_record.assert_called_with( - ref="record:host/xyz", data={"comment": 
"new description", "name": "server2.local.test.net"} - ) - infoblox_adapter.conn.update_fixed_address.assert_called_once() - infoblox_adapter.conn.update_fixed_address.assert_called_with( - ref="fixedaddress/xyz", data={"comment": "new fa comment", "name": "new fa name"} - ) - infoblox_adapter.conn.create_host_record.assert_not_called() - infoblox_adapter.conn.create_a_record.assert_not_called() - infoblox_adapter.conn.update_a_record.assert_not_called() - infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.update_ptr_record.assert_not_called() + infoblox_adapter.conn.delete_host_record_by_ref.assert_called_once() + infoblox_adapter.conn.delete_host_record_by_ref.assert_called_with(ref="record:host/xyz") mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_called_once() - mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" - ) + + +class TestModelInfobloxDnsPTRRecord(TestCase): + """Tests DNS PTR model operations.""" + + def setUp(self): + "Test class set up." + self.config = create_default_infoblox_config() + self.nb_adapter = NautobotAdapter(config=self.config) + self.nb_adapter.job = Mock() @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", @@ -2034,75 +1361,46 @@ def test_ip_address_update_fixed_address_reservation_and_host_record( return_value=True, ) @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", - autospec=True, - ) - def test_ip_address_update_fixed_address_reservation_and_a_and_ptr_records( - self, mock_tag_involved_objects, mock_validate_dns_name - ): - """Ensure Fixed Address RESERVED and A+PTR records are updated together.""" - nb_ipaddress_atrs = { - "dns_name": "server2.local.test.net", - "description": "new description", - "has_fixed_address": True, - "has_a_record": True, - "has_ptr_record": True, - "fixed_address_name": "new fa name", - "fixed_address_comment": "new fa comment", - } - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) + "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", + autospec=True, + ) + def test_ptr_record_create(self, mock_tag_involved_objects, mock_validate_dns_name): + """Validate PTR record is created.""" + nb_arecord_atrs = {} + nb_ds_arecord = self.nb_adapter.dnsarecord(**_get_dns_a_record_dict(nb_arecord_atrs)) + self.nb_adapter.add(nb_ds_arecord) + nb_ptrrecord_atrs = {} + nb_ds_ptrrecord = self.nb_adapter.dnsptrrecord(**_get_dns_ptr_record_dict(nb_ptrrecord_atrs)) + self.nb_adapter.add(nb_ds_ptrrecord) self.nb_adapter.load() - with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) - infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( name="Global", ext_attrs={}, ) infoblox_adapter.add(inf_ds_namespace) - inf_ipaddress_atrs = { - "dns_name": "server1.local.test.net", - "has_a_record": True, - "has_ptr_record": True, - "has_fixed_address": True, - "a_record_ref": "record:a/xyz", - 
"ptr_record_ref": "record:ptr/xyz", - "fixed_address_ref": "fixedaddress/xyz", - "fixed_address_type": "RESERVED", - "description": "old description", - "fixed_address_name": "old fa name", - "fixed_address_comment": "old fa comment", - } - inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) - infoblox_adapter.add(inf_ds_ipaddress) + inf_arecord_atrs = {} + inf_ds_arecord = infoblox_adapter.dnsarecord(**_get_dns_a_record_dict(inf_arecord_atrs)) + infoblox_adapter.add(inf_ds_arecord) + infoblox_adapter.job = Mock() + job_logger = Mock() + infoblox_adapter.job.logger = job_logger self.nb_adapter.sync_to(infoblox_adapter) - - infoblox_adapter.conn.update_a_record.assert_called_once() - infoblox_adapter.conn.update_a_record.assert_called_with( - ref="record:a/xyz", data={"comment": "new description", "name": "server2.local.test.net"} - ) - infoblox_adapter.conn.update_ptr_record.assert_called_once() - infoblox_adapter.conn.update_ptr_record.assert_called_with( - ref="record:ptr/xyz", data={"comment": "new description", "ptrdname": "server2.local.test.net"} - ) - infoblox_adapter.conn.update_fixed_address.assert_called_once() - infoblox_adapter.conn.update_fixed_address.assert_called_with( - ref="fixedaddress/xyz", data={"comment": "new fa comment", "name": "new fa name"} + infoblox_adapter.conn.create_ptr_record.assert_called_once() + infoblox_adapter.conn.create_ptr_record.assert_called_with( + fqdn="server1.local.test.net", ip_address="10.0.0.1", comment="Test PTR Record", network_view="default" ) - infoblox_adapter.conn.create_host_record.assert_not_called() - infoblox_adapter.conn.update_host_record.assert_not_called() infoblox_adapter.conn.create_a_record.assert_not_called() - infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.create_host_record.assert_not_called() mock_tag_involved_objects.assert_called_once() mock_validate_dns_name.assert_called_once() mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + infoblox_client=mock_client, dns_name="server1.local.test.net", network_view="default" ) @unittest.mock.patch( @@ -2114,144 +1412,89 @@ def test_ip_address_update_fixed_address_reservation_and_a_and_ptr_records( "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_fixed_address_mac_and_host_record( - self, mock_tag_involved_objects, mock_validate_dns_name - ): - """Ensure Fixed Address MAC and Host records are updated together.""" - nb_ipaddress_atrs = { - "dns_name": "server2.local.test.net", - "description": "new description", - "has_fixed_address": True, - "has_host_record": True, - "fixed_address_name": "new fa name", - "fixed_address_comment": "new fa comment", - } - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) + def test_ptr_record_create_no_dns_name(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure DNS PTR record is not created if DNS name is missing.""" + nb_arecord_atrs = {} + nb_ds_arecord = self.nb_adapter.dnsarecord(**_get_dns_a_record_dict(nb_arecord_atrs)) + self.nb_adapter.add(nb_ds_arecord) + nb_ptrrecord_atrs = {"dns_name": ""} + nb_ds_ptrrecord = self.nb_adapter.dnsptrrecord(**_get_dns_ptr_record_dict(nb_ptrrecord_atrs)) + self.nb_adapter.add(nb_ds_ptrrecord) self.nb_adapter.load() - with unittest.mock.patch( 
"nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS - self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) - infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( name="Global", ext_attrs={}, ) infoblox_adapter.add(inf_ds_namespace) - inf_ipaddress_atrs = { - "dns_name": "server1.local.test.net", - "has_host_record": True, - "has_fixed_address": True, - "host_record_ref": "record:host/xyz", - "fixed_address_ref": "fixedaddress/xyz", - "fixed_address_type": "MAC_ADDRESS", - "description": "old description", - "fixed_address_name": "old fa name", - "fixed_address_comment": "old fa comment", - } - inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) - infoblox_adapter.add(inf_ds_ipaddress) + inf_arecord_atrs = {} + inf_ds_arecord = infoblox_adapter.dnsarecord(**_get_dns_a_record_dict(inf_arecord_atrs)) + infoblox_adapter.add(inf_ds_arecord) + infoblox_adapter.job = Mock() + job_logger = Mock() + infoblox_adapter.job.logger = job_logger self.nb_adapter.sync_to(infoblox_adapter) + log_msg = "Cannot create Infoblox PTR DNS record for IP Address 10.0.0.1. DNS name is not defined." + job_logger.warning.assert_called_with(log_msg) - infoblox_adapter.conn.update_host_record.assert_called_once() - infoblox_adapter.conn.update_host_record.assert_called_with( - ref="record:host/xyz", data={"comment": "new description", "name": "server2.local.test.net"} - ) - infoblox_adapter.conn.update_fixed_address.assert_called_once() - infoblox_adapter.conn.update_fixed_address.assert_called_with( - ref="fixedaddress/xyz", data={"comment": "new fa comment", "name": "new fa name"} - ) - infoblox_adapter.conn.create_host_record.assert_not_called() + mock_tag_involved_objects.assert_called_once() + mock_validate_dns_name.assert_not_called() infoblox_adapter.conn.create_a_record.assert_not_called() - infoblox_adapter.conn.update_a_record.assert_not_called() infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.update_ptr_record.assert_not_called() - mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_called_once() - mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" - ) + infoblox_adapter.conn.create_host_record.assert_not_called() @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", autospec=True, - return_value=True, + return_value=False, ) @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_fixed_address_mac_and_a_and_ptr_records( - self, mock_tag_involved_objects, mock_validate_dns_name - ): - """Ensure Fixed Address MAC and A+PTR records are updated together.""" - nb_ipaddress_atrs = { - "dns_name": "server2.local.test.net", - "description": "new description", - "has_fixed_address": True, - "has_a_record": True, - "has_ptr_record": True, - "fixed_address_name": "new fa name", - "fixed_address_comment": "new fa comment", - } - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - 
self.nb_adapter.add(nb_ds_ipaddress) + def test_ptr_record_create_invalid_dns_name(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure DNS PTR record is not created if DNS name is invalid.""" + nb_arecord_atrs = {} + nb_ds_arecord = self.nb_adapter.dnsarecord(**_get_dns_a_record_dict(nb_arecord_atrs)) + self.nb_adapter.add(nb_ds_arecord) + nb_ptrrecord_atrs = {"dns_name": ".invalid-dns-name"} + nb_ds_ptrrecord = self.nb_adapter.dnsptrrecord(**_get_dns_ptr_record_dict(nb_ptrrecord_atrs)) + self.nb_adapter.add(nb_ds_ptrrecord) self.nb_adapter.load() - with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) - infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( name="Global", ext_attrs={}, ) infoblox_adapter.add(inf_ds_namespace) - inf_ipaddress_atrs = { - "dns_name": "server1.local.test.net", - "has_a_record": True, - "has_ptr_record": True, - "has_fixed_address": True, - "a_record_ref": "record:a/xyz", - "ptr_record_ref": "record:ptr/xyz", - "fixed_address_ref": "fixedaddress/xyz", - "fixed_address_type": "MAC_ADDRESS", - "description": "old description", - "fixed_address_name": "old fa name", - "fixed_address_comment": "old fa comment", - } - inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) - infoblox_adapter.add(inf_ds_ipaddress) + inf_arecord_atrs = {} + inf_ds_arecord = infoblox_adapter.dnsarecord(**_get_dns_a_record_dict(inf_arecord_atrs)) + infoblox_adapter.add(inf_ds_arecord) + infoblox_adapter.job = Mock() + job_logger = Mock() + infoblox_adapter.job.logger = job_logger self.nb_adapter.sync_to(infoblox_adapter) + log_msg = "Invalid zone fqdn in DNS name `.invalid-dns-name` for IP Address 10.0.0.1." 
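Past the warning assertion below, the PTR update tests assert calls like update_ptr_record(ref=..., data={"ptrdname": ...}), meaning only the changed attribute is pushed. That follows the diffsync model contract, where update() receives just the attribute diff; a hedged sketch of how such a model could map that diff onto one API call (the class and its wiring are illustrative, not this repository's model):

    class DnsPTRRecordSketch:
        """Stand-in showing an update() that maps a DiffSync attrs diff to one call; sketch only."""

        def __init__(self, adapter, ref):
            self.adapter = adapter
            self.ref = ref

        def update(self, attrs):
            # attrs carries only what changed, e.g. {"dns_name": "server2.local.test.net"}.
            if "dns_name" in attrs:
                self.adapter.conn.update_ptr_record(ref=self.ref, data={"ptrdname": attrs["dns_name"]})
            return self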
+ job_logger.warning.assert_called_with(log_msg) - infoblox_adapter.conn.update_a_record.assert_called_once() - infoblox_adapter.conn.update_a_record.assert_called_with( - ref="record:a/xyz", data={"comment": "new description", "name": "server2.local.test.net"} - ) - infoblox_adapter.conn.update_ptr_record.assert_called_once() - infoblox_adapter.conn.update_ptr_record.assert_called_with( - ref="record:ptr/xyz", data={"comment": "new description", "ptrdname": "server2.local.test.net"} - ) - infoblox_adapter.conn.update_fixed_address.assert_called_once() - infoblox_adapter.conn.update_fixed_address.assert_called_with( - ref="fixedaddress/xyz", data={"comment": "new fa comment", "name": "new fa name"} - ) - infoblox_adapter.conn.create_host_record.assert_not_called() - infoblox_adapter.conn.update_host_record.assert_not_called() - infoblox_adapter.conn.create_a_record.assert_not_called() - infoblox_adapter.conn.create_ptr_record.assert_not_called() mock_tag_involved_objects.assert_called_once() mock_validate_dns_name.assert_called_once() mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + infoblox_client=mock_client, dns_name=".invalid-dns-name", network_view="default" ) + infoblox_adapter.conn.create_a_record.assert_not_called() + infoblox_adapter.conn.create_ptr_record.assert_not_called() + infoblox_adapter.conn.create_host_record.assert_not_called() @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", @@ -2262,27 +1505,21 @@ def test_ip_address_update_fixed_address_mac_and_a_and_ptr_records( "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_fixed_address_reservation_and_create_host_record( - self, mock_tag_involved_objects, mock_validate_dns_name - ): - """Ensure Fixed Address RESERVED is updated and Host record is created.""" - nb_ipaddress_atrs = { - "dns_name": "server2.local.test.net", - "description": "new description", - "has_fixed_address": True, - "has_host_record": True, - "fixed_address_name": "new fa name", - "fixed_address_comment": "new fa comment", - } - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) + def test_ptr_record_update(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure PTR records is updated.""" + nb_arecord_atrs = {} + nb_ds_arecord = self.nb_adapter.dnsarecord(**_get_dns_a_record_dict(nb_arecord_atrs)) + self.nb_adapter.add(nb_ds_arecord) + nb_ptrrecord_atrs = {"dns_name": "server2.local.test.net"} + nb_ds_ptrrecord = self.nb_adapter.dnsptrrecord(**_get_dns_ptr_record_dict(nb_ptrrecord_atrs)) + self.nb_adapter.add(nb_ds_ptrrecord) self.nb_adapter.load() with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED - self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( @@ -2290,33 +1527,26 @@ def test_ip_address_update_fixed_address_reservation_and_create_host_record( ext_attrs={}, ) 
infoblox_adapter.add(inf_ds_namespace) - inf_ipaddress_atrs = { + inf_arecord_atrs = {} + inf_ds_arecord = infoblox_adapter.dnsarecord(**_get_dns_a_record_dict(inf_arecord_atrs)) + infoblox_adapter.add(inf_ds_arecord) + inf_ptrrecord_atrs = { "dns_name": "server1.local.test.net", - "has_host_record": False, - "has_fixed_address": True, - "fixed_address_ref": "fixedaddress/xyz", - "fixed_address_type": "RESERVED", - "description": "old description", - "fixed_address_name": "old fa name", - "fixed_address_comment": "old fa comment", + "ref": "record:ptr/xyz", } - inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) - infoblox_adapter.add(inf_ds_ipaddress) + inf_ds_ptrrecord = infoblox_adapter.dnsptrrecord(**_get_dns_ptr_record_dict(inf_ptrrecord_atrs)) + infoblox_adapter.add(inf_ds_ptrrecord) self.nb_adapter.sync_to(infoblox_adapter) - - infoblox_adapter.conn.create_host_record.assert_called_once() - infoblox_adapter.conn.create_host_record.assert_called_with( - fqdn="server2.local.test.net", ip_address="10.0.0.1", comment="new description", network_view="default" - ) - infoblox_adapter.conn.update_fixed_address.assert_called_once() - infoblox_adapter.conn.update_fixed_address.assert_called_with( - ref="fixedaddress/xyz", data={"comment": "new fa comment", "name": "new fa name"} + infoblox_adapter.conn.update_ptr_record.assert_called_once() + infoblox_adapter.conn.update_ptr_record.assert_called_with( + ref="record:ptr/xyz", data={"ptrdname": "server2.local.test.net"} ) + infoblox_adapter.conn.update_a_record.assert_not_called() + infoblox_adapter.conn.create_host_record.assert_not_called() infoblox_adapter.conn.update_host_record.assert_not_called() infoblox_adapter.conn.create_a_record.assert_not_called() - infoblox_adapter.conn.update_a_record.assert_not_called() infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.update_ptr_record.assert_not_called() + infoblox_adapter.conn.update_fixed_address.assert_not_called() mock_tag_involved_objects.assert_called_once() mock_validate_dns_name.assert_called_once() mock_validate_dns_name.assert_called_with( @@ -2326,108 +1556,63 @@ def test_ip_address_update_fixed_address_reservation_and_create_host_record( @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", autospec=True, - return_value=True, + return_value=False, ) @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_fixed_address_reservation_and_create_a_and_ptr_records( - self, mock_tag_involved_objects, mock_validate_dns_name - ): - """Ensure Fixed Address RESERVED is updated and A+PTR records are created.""" - nb_ipaddress_atrs = { - "dns_name": "server2.local.test.net", - "description": "new description", - "has_fixed_address": True, - "has_a_record": True, - "has_ptr_record": True, - "fixed_address_name": "new fa name", - "fixed_address_comment": "new fa comment", - } - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) + def test_ptr_record_update_invalid_dns_name(self, mock_tag_involved_objects, mock_validate_dns_name): + """Ensure DNS PTR record is not updated if DNS name is invalid.""" + nb_arecord_atrs = {} + nb_ds_arecord = self.nb_adapter.dnsarecord(**_get_dns_a_record_dict(nb_arecord_atrs)) + self.nb_adapter.add(nb_ds_arecord) + nb_ptrrecord_atrs = {"dns_name": 
".invalid-dns-name"} + nb_ds_ptrrecord = self.nb_adapter.dnsptrrecord(**_get_dns_ptr_record_dict(nb_ptrrecord_atrs)) + self.nb_adapter.add(nb_ds_ptrrecord) self.nb_adapter.load() - with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) - infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( name="Global", ext_attrs={}, ) infoblox_adapter.add(inf_ds_namespace) - inf_ipaddress_atrs = { - "dns_name": "server1.local.test.net", - "has_a_record": False, - "has_ptr_record": False, - "has_fixed_address": True, - "fixed_address_ref": "fixedaddress/xyz", - "fixed_address_type": "RESERVED", - "description": "old description", - "fixed_address_name": "old fa name", - "fixed_address_comment": "old fa comment", - } - inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) - infoblox_adapter.add(inf_ds_ipaddress) + inf_arecord_atrs = {} + inf_ds_arecord = infoblox_adapter.dnsarecord(**_get_dns_a_record_dict(inf_arecord_atrs)) + infoblox_adapter.add(inf_ds_arecord) + infoblox_adapter.job = Mock() + job_logger = Mock() + infoblox_adapter.job.logger = job_logger self.nb_adapter.sync_to(infoblox_adapter) + log_msg = "Invalid zone fqdn in DNS name `.invalid-dns-name` for IP Address 10.0.0.1." + job_logger.warning.assert_called_with(log_msg) - infoblox_adapter.conn.create_a_record.assert_called_once() - infoblox_adapter.conn.create_a_record.assert_called_with( - fqdn="server2.local.test.net", ip_address="10.0.0.1", comment="new description", network_view="default" - ) - infoblox_adapter.conn.create_ptr_record.assert_called_once() - infoblox_adapter.conn.create_ptr_record.assert_called_with( - fqdn="server2.local.test.net", ip_address="10.0.0.1", comment="new description", network_view="default" - ) - infoblox_adapter.conn.update_fixed_address.assert_called_once() - infoblox_adapter.conn.update_fixed_address.assert_called_with( - ref="fixedaddress/xyz", data={"comment": "new fa comment", "name": "new fa name"} - ) - infoblox_adapter.conn.create_host_record.assert_not_called() - infoblox_adapter.conn.update_host_record.assert_not_called() - infoblox_adapter.conn.update_a_record.assert_not_called() infoblox_adapter.conn.update_ptr_record.assert_not_called() mock_tag_involved_objects.assert_called_once() mock_validate_dns_name.assert_called_once() mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" + infoblox_client=mock_client, dns_name=".invalid-dns-name", network_view="default" ) - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_mac_address_reservation_and_create_host_record( - self, mock_tag_involved_objects, mock_validate_dns_name - ): - """Ensure Fixed Address MAC is updated and Host record is created.""" - nb_ipaddress_atrs = { - "dns_name": "server2.local.test.net", - "description": "new description", - "has_fixed_address": True, - "has_host_record": True, - 
"fixed_address_name": "ReservedIP2", - "fixed_address_comment": "New Comment", - } - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) + def test_ptr_record_delete_fail(self, mock_tag_involved_objects): + """Ensure DNS PTR record is not deleted if object deletion is not enabled in the config.""" self.nb_adapter.load() with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS - self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD + self.config.infoblox_deletable_models = [] infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( @@ -2435,70 +1620,30 @@ def test_ip_address_update_mac_address_reservation_and_create_host_record( ext_attrs={}, ) infoblox_adapter.add(inf_ds_namespace) - inf_ipaddress_atrs = { + inf_ptrrecord_atrs = { "dns_name": "server1.local.test.net", - "has_host_record": False, - "has_fixed_address": True, - "fixed_address_ref": "fixedaddress/xyz", - "fixed_address_type": "MAC_ADDRESS", - "description": "old description", - "fixed_address_name": "ReservedIP1", - "fixed_address_comment": "Old Comment", + "ref": "record:ptr/xyz", } - inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) - infoblox_adapter.add(inf_ds_ipaddress) + inf_ds_ptrrecord = infoblox_adapter.dnsptrrecord(**_get_dns_ptr_record_dict(inf_ptrrecord_atrs)) + infoblox_adapter.add(inf_ds_ptrrecord) self.nb_adapter.sync_to(infoblox_adapter) - - infoblox_adapter.conn.create_host_record.assert_called_once() - infoblox_adapter.conn.create_host_record.assert_called_with( - fqdn="server2.local.test.net", ip_address="10.0.0.1", comment="new description", network_view="default" - ) - infoblox_adapter.conn.update_fixed_address.assert_called_once() - infoblox_adapter.conn.update_fixed_address.assert_called_with( - ref="fixedaddress/xyz", data={"comment": "New Comment", "name": "ReservedIP2"} - ) - infoblox_adapter.conn.update_host_record.assert_not_called() - infoblox_adapter.conn.create_a_record.assert_not_called() - infoblox_adapter.conn.update_a_record.assert_not_called() - infoblox_adapter.conn.create_ptr_record.assert_not_called() - infoblox_adapter.conn.update_ptr_record.assert_not_called() + infoblox_adapter.conn.delete_ptr_record_by_ref.assert_not_called() mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_called_once() - mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" - ) - @unittest.mock.patch( - "nautobot_ssot.integrations.infoblox.diffsync.models.infoblox.validate_dns_name", - autospec=True, - return_value=True, - ) @unittest.mock.patch( "nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot.NautobotMixin.tag_involved_objects", autospec=True, ) - def test_ip_address_update_fixed_address_mac_and_create_a_and_ptr_records( - self, mock_tag_involved_objects, mock_validate_dns_name - ): - """Ensure Fixed Address MAC is updated and A+PTR records are created.""" - nb_ipaddress_atrs = { - "dns_name": "server2.local.test.net", - "description": "new description", - "has_fixed_address": True, - "has_a_record": 
True, - "has_ptr_record": True, - "fixed_address_name": "new fa name", - "fixed_address_comment": "new fa comment", - } - nb_ds_ipaddress = self.nb_adapter.ipaddress(**_get_ip_address_dict(nb_ipaddress_atrs)) - self.nb_adapter.add(nb_ds_ipaddress) + def test_ptr_record_delete_success(self, mock_tag_involved_objects): + """Ensure DNS PTR record is deleted if object deletion is enabled in the config.""" self.nb_adapter.load() with unittest.mock.patch( "nautobot_ssot.integrations.infoblox.utils.client.InfobloxApi", autospec=True ) as mock_client: - self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD + self.config.infoblox_deletable_models = [InfobloxDeletableModelChoices.DNS_PTR_RECORD] infoblox_adapter = InfobloxAdapter(conn=mock_client, config=self.config) infoblox_adapter.job = Mock() inf_ds_namespace = infoblox_adapter.namespace( @@ -2506,39 +1651,13 @@ def test_ip_address_update_fixed_address_mac_and_create_a_and_ptr_records( ext_attrs={}, ) infoblox_adapter.add(inf_ds_namespace) - inf_ipaddress_atrs = { + inf_ptrrecord_atrs = { "dns_name": "server1.local.test.net", - "has_a_record": False, - "has_ptr_record": False, - "has_fixed_address": True, - "fixed_address_ref": "fixedaddress/xyz", - "fixed_address_type": "MAC_ADDRESS", - "description": "old description", - "fixed_address_name": "old fa name", - "fixed_address_comment": "old fa comment", + "ref": "record:ptr/xyz", } - inf_ds_ipaddress = infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_ipaddress_atrs)) - infoblox_adapter.add(inf_ds_ipaddress) + inf_ds_ptrrecord = infoblox_adapter.dnsptrrecord(**_get_dns_ptr_record_dict(inf_ptrrecord_atrs)) + infoblox_adapter.add(inf_ds_ptrrecord) self.nb_adapter.sync_to(infoblox_adapter) - - infoblox_adapter.conn.create_a_record.assert_called_once() - infoblox_adapter.conn.create_a_record.assert_called_with( - fqdn="server2.local.test.net", ip_address="10.0.0.1", comment="new description", network_view="default" - ) - infoblox_adapter.conn.create_ptr_record.assert_called_once() - infoblox_adapter.conn.create_ptr_record.assert_called_with( - fqdn="server2.local.test.net", ip_address="10.0.0.1", comment="new description", network_view="default" - ) - infoblox_adapter.conn.update_fixed_address.assert_called_once() - infoblox_adapter.conn.update_fixed_address.assert_called_with( - ref="fixedaddress/xyz", data={"comment": "new fa comment", "name": "new fa name"} - ) - infoblox_adapter.conn.create_host_record.assert_not_called() - infoblox_adapter.conn.update_host_record.assert_not_called() - infoblox_adapter.conn.update_a_record.assert_not_called() - infoblox_adapter.conn.update_ptr_record.assert_not_called() + infoblox_adapter.conn.delete_ptr_record_by_ref.assert_called_once() + infoblox_adapter.conn.delete_ptr_record_by_ref.assert_called_with(ref="record:ptr/xyz") mock_tag_involved_objects.assert_called_once() - mock_validate_dns_name.assert_called_once() - mock_validate_dns_name.assert_called_with( - infoblox_client=mock_client, dns_name="server2.local.test.net", network_view="default" - ) diff --git a/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py b/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py index faffec45d..260c160a7 100644 --- a/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py +++ b/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py @@ -7,6 +7,7 @@ from nautobot.extras.models import RelationshipAssociation, 
Status from nautobot.ipam.models import VLAN, IPAddress, Namespace, Prefix, VLANGroup +from nautobot_ssot.integrations.infoblox.choices import DNSRecordTypeChoices from nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot import NautobotAdapter from nautobot_ssot.tests.infoblox.fixtures_infoblox import create_default_infoblox_config, create_prefix_relationship @@ -312,3 +313,54 @@ def test_load_ipaddresses_loads_ips_ipv4_and_ipv6(self): ("2001:5b0:4100::1", "Global"), }, ) + + def test_load_ipaddresses_load_host_records(self): + self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD + nb_adapter = NautobotAdapter(config=self.config) + nb_adapter.job = mock.Mock() + sync_filters = [{"network_view": "default"}] + nb_adapter.load_ipaddresses(include_ipv4=True, include_ipv6=False, sync_filters=sync_filters) + actual_records = { + (hostr.address, hostr.namespace, hostr.dns_name) for hostr in nb_adapter.get_all("dnshostrecord") + } + self.assertEqual( + actual_records, + { + ("10.0.1.1", "Global", "server1.nautobot.test.com"), + ("10.0.1.2", "Global", "server2.nautobot.test.com"), + }, + ) + + def test_load_ipaddresses_load_a_records(self): + self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD + nb_adapter = NautobotAdapter(config=self.config) + nb_adapter.job = mock.Mock() + sync_filters = [{"network_view": "dev"}] + nb_adapter.load_ipaddresses(include_ipv4=True, include_ipv6=False, sync_filters=sync_filters) + actual_records = { + (hostr.address, hostr.namespace, hostr.dns_name) for hostr in nb_adapter.get_all("dnsarecord") + } + self.assertEqual( + actual_records, + { + ("10.0.1.1", "dev", "server10.nautobot.test.com"), + ("10.2.1.1", "dev", "server11.nautobot.test.com"), + }, + ) + + def test_load_ipaddresses_load_ptr_records(self): + self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD + nb_adapter = NautobotAdapter(config=self.config) + nb_adapter.job = mock.Mock() + sync_filters = [{"network_view": "test"}] + nb_adapter.load_ipaddresses(include_ipv4=True, include_ipv6=False, sync_filters=sync_filters) + actual_records = { + (hostr.address, hostr.namespace, hostr.dns_name) for hostr in nb_adapter.get_all("dnsptrrecord") + } + self.assertEqual( + actual_records, + { + ("10.5.1.5", "test", "server21.nautobot.test.com"), + ("10.2.1.10", "test", "server20.nautobot.test.com"), + }, + ) diff --git a/nautobot_ssot/tests/infoblox/test_nautobot_models.py b/nautobot_ssot/tests/infoblox/test_nautobot_models.py index 6781591df..c7177eff4 100644 --- a/nautobot_ssot/tests/infoblox/test_nautobot_models.py +++ b/nautobot_ssot/tests/infoblox/test_nautobot_models.py @@ -2,10 +2,17 @@ """Unit tests for the Infoblox Diffsync models.""" from unittest.mock import Mock +from django.contrib.contenttypes.models import ContentType from django.test import TestCase -from nautobot.extras.models import Status, Tag +from nautobot.extras.choices import CustomFieldTypeChoices +from nautobot.extras.models import CustomField, Status, Tag from nautobot.ipam.models import IPAddress, Namespace, Prefix +from nautobot_ssot.integrations.infoblox.choices import ( + DNSRecordTypeChoices, + FixedAddressTypeChoices, + NautobotDeletableModelChoices, +) from nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox import InfobloxAdapter from nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot import NautobotAdapter @@ -30,6 +37,57 @@ def _get_ip_address_dict(attrs): return ipaddress_dict +def _get_dns_a_record_dict(attrs): + """Build dict used for creating diffsync DNS A 
record.""" + dns_a_record_dict = { + "description": "Test A Record", + "address": "10.0.0.1", + "status": "Active", + "prefix": "10.0.0.0/8", + "prefix_length": 8, + "dns_name": "server1.nautobot.local.net", + "ip_addr_type": "host", + "namespace": "dev", + } + dns_a_record_dict.update(attrs) + + return dns_a_record_dict + + +def _get_dns_ptr_record_dict(attrs): + """Build dict used for creating diffsync DNS PTR record.""" + dns_ptr_record_dict = { + "description": "Test PTR Record", + "address": "10.0.0.1", + "status": "Active", + "prefix": "10.0.0.0/8", + "prefix_length": 8, + "dns_name": "server1.local.test.net", + "ip_addr_type": "host", + "namespace": "dev", + } + dns_ptr_record_dict.update(attrs) + + return dns_ptr_record_dict + + +def _get_dns_host_record_dict(attrs): + """Build dict used for creating diffsync DNS Host record.""" + dns_host_record_dict = { + "description": "Test Host Record", + "address": "10.0.0.1", + "status": "Active", + "prefix": "10.0.0.0/8", + "prefix_length": 8, + "dns_name": "server1.local.test.net", + "ip_addr_type": "host", + "namespace": "dev", + } + dns_host_record_dict.update(attrs) + + return dns_host_record_dict + + def _get_network_dict(attrs): """Build dict used for creating diffsync network.""" network_dict = { @@ -144,7 +202,52 @@ def setUp(self): ) self.infoblox_adapter.add(inf_ds_namespace) - def test_ip_address_create_address_from_fixed_address_reserved(self): + mac_address_custom_field, _ = CustomField.objects.get_or_create( + type=CustomFieldTypeChoices.TYPE_TEXT, + key="mac_address", + defaults={ + "label": "MAC Address", + }, + ) + mac_address_custom_field.content_types.add(ContentType.objects.get_for_model(IPAddress)) + + fixed_address_comment_custom_field, _ = CustomField.objects.get_or_create( + type=CustomFieldTypeChoices.TYPE_TEXT, + key="fixed_address_comment", + defaults={ + "label": "Fixed Address Comment", + }, + ) + fixed_address_comment_custom_field.content_types.add(ContentType.objects.get_for_model(IPAddress)) + + dns_a_record_comment_custom_field, _ = CustomField.objects.get_or_create( + type=CustomFieldTypeChoices.TYPE_TEXT, + key="dns_a_record_comment", + defaults={ + "label": "DNS A Record Comment", + }, + ) + dns_a_record_comment_custom_field.content_types.add(ContentType.objects.get_for_model(IPAddress)) + + dns_host_record_comment_custom_field, _ = CustomField.objects.get_or_create( + type=CustomFieldTypeChoices.TYPE_TEXT, + key="dns_host_record_comment", + defaults={ + "label": "DNS Host Record Comment", + }, + ) + dns_host_record_comment_custom_field.content_types.add(ContentType.objects.get_for_model(IPAddress)) + + dns_ptr_record_comment_custom_field, _ = CustomField.objects.get_or_create( + type=CustomFieldTypeChoices.TYPE_TEXT, + key="dns_ptr_record_comment", + defaults={ + "label": "DNS PTR Record Comment", + }, + ) + dns_ptr_record_comment_custom_field.content_types.add(ContentType.objects.get_for_model(IPAddress)) + + def test_ip_address_create_from_fixed_address_reserved(self): """Validate ip address gets created from Infoblox fixed address reservation.""" inf_network_atrs = {"network_type": "network", "namespace": "dev"} inf_ds_network = self.infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs)) @@ -152,7 +255,7 @@ def test_ip_address_create_address_from_fixed_address_reserved(self): inf_address_atrs = { "ip_addr_type": "dhcp", "has_fixed_address": True, - "fixed_address_name": "FixedAddressReserved", + "description": "FixedAddressReserved", "fixed_address_comment": "Created From FA Reserved", } 
inf_ds_ipaddress = self.infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_address_atrs)) @@ -166,6 +269,7 @@ def test_ip_address_create_address_from_fixed_address_reserved(self): namespace=self.namespace_dev, ) + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED nb_adapter = NautobotAdapter(config=self.config) nb_adapter.job = Mock() nb_adapter.load() @@ -181,7 +285,7 @@ def test_ip_address_create_address_from_fixed_address_reserved(self): self.assertEqual("Created From FA Reserved", ipaddress.custom_field_data.get("fixed_address_comment")) self.assertIn(self.tag_sync_from_infoblox, ipaddress.tags.all()) - def test_ip_address_create_address_from_fixed_address_mac(self): + def test_ip_address_create_from_fixed_address_mac(self): """Validate ip address gets created from Infoblox fixed address with mac address.""" inf_network_atrs = {"network_type": "network", "namespace": "dev"} inf_ds_network = self.infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs)) @@ -190,7 +294,7 @@ def test_ip_address_create_address_from_fixed_address_mac(self): "ip_addr_type": "dhcp", "has_fixed_address": True, "mac_address": "52:1f:83:d4:9a:2e", - "fixed_address_name": "FixedAddressMAC", + "description": "FixedAddressMAC", "fixed_address_comment": "Created From FA MAC", } inf_ds_ipaddress = self.infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_address_atrs)) @@ -204,6 +308,7 @@ def test_ip_address_create_address_from_fixed_address_mac(self): namespace=self.namespace_dev, ) + self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS nb_adapter = NautobotAdapter(config=self.config) nb_adapter.job = Mock() nb_adapter.load() @@ -220,19 +325,102 @@ def test_ip_address_create_address_from_fixed_address_mac(self): self.assertEqual("Created From FA MAC", ipaddress.custom_field_data.get("fixed_address_comment")) self.assertIn(self.tag_sync_from_infoblox, ipaddress.tags.all()) - def test_ip_address_create_address_from_dns_record(self): - """Validate ip address gets created from Infoblox DNS host record. 
This also applies to A record.""" + def test_ip_address_create_from_dns_a_record(self): + """Validate ip address gets created from Infoblox DNS A record.""" inf_network_atrs = {"network_type": "network", "namespace": "dev"} inf_ds_network = self.infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs)) self.infoblox_adapter.add(inf_ds_network) - inf_address_atrs = { - "ip_addr_type": "host", - "has_host_record": True, + inf_arecord_atrs = { "dns_name": "server1.nautobot.local.net", - "description": "Server1", + "ref": "record:a/xyz", + } + inf_ds_arecord = self.infoblox_adapter.dnsarecord(**_get_dns_a_record_dict(inf_arecord_atrs)) + self.infoblox_adapter.add(inf_ds_arecord) + + Prefix.objects.get_or_create( + prefix="10.0.0.0/8", + status=self.status_active, + type="network", + description="TestNetwork", + namespace=self.namespace_dev, + ) + + self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD + self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS + nb_adapter = NautobotAdapter(config=self.config) + nb_adapter.job = Mock() + nb_adapter.load() + self.infoblox_adapter.sync_to(nb_adapter) + + ipaddress = IPAddress.objects.get(address="10.0.0.1/8", parent__namespace__name="dev") + self.assertEqual("10.0.0.1/8", str(ipaddress.address)) + self.assertEqual("dev", ipaddress.parent.namespace.name) + self.assertEqual("Active", ipaddress.status.name) + self.assertEqual("server1.nautobot.local.net", ipaddress.dns_name) + self.assertEqual("Test A Record", ipaddress.custom_field_data.get("dns_a_record_comment")) + self.assertEqual("", ipaddress.description) + self.assertEqual("host", ipaddress.type) + self.assertIn(self.tag_sync_from_infoblox, ipaddress.tags.all()) + + def test_ip_address_create_from_dns_host_record(self): + """Validate ip address gets created from Infoblox DNS Host record.""" + inf_network_atrs = {"network_type": "network", "namespace": "dev"} + inf_ds_network = self.infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs)) + self.infoblox_adapter.add(inf_ds_network) + inf_hostrecord_atrs = { + "address": "10.0.0.2", + "dns_name": "server1.nautobot.local.net", + "ref": "record:host/xyz", + } + inf_ds_hostrecord = self.infoblox_adapter.dnshostrecord(**_get_dns_host_record_dict(inf_hostrecord_atrs)) + self.infoblox_adapter.add(inf_ds_hostrecord) + + Prefix.objects.get_or_create( + prefix="10.0.0.0/8", + status=self.status_active, + type="network", + description="TestNetwork", + namespace=self.namespace_dev, + ) + + self.config.fixed_address_type = FixedAddressTypeChoices.DONT_CREATE_RECORD + self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD + nb_adapter = NautobotAdapter(config=self.config) + nb_adapter.job = Mock() + nb_adapter.load() + self.infoblox_adapter.sync_to(nb_adapter) + + ipaddress = IPAddress.objects.get(address="10.0.0.2/8", parent__namespace__name="dev") + self.assertEqual("10.0.0.2/8", str(ipaddress.address)) + self.assertEqual("dev", ipaddress.parent.namespace.name) + self.assertEqual("Active", ipaddress.status.name) + self.assertEqual("server1.nautobot.local.net", ipaddress.dns_name) + self.assertEqual("Test Host Record", ipaddress.custom_field_data.get("dns_host_record_comment")) + self.assertEqual("", ipaddress.description) + self.assertEqual("host", ipaddress.type) + self.assertIn(self.tag_sync_from_infoblox, ipaddress.tags.all()) + + def test_ip_address_create_from_fixed_address_mac_and_dns_a_record(self): + """Validate ip address gets created from Infoblox Fixed Address MAC and updated with DNS A record data."""
+ inf_network_atrs = {"network_type": "network", "namespace": "dev"} + inf_ds_network = self.infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs)) + self.infoblox_adapter.add(inf_ds_network) + inf_address_atrs = { + "ip_addr_type": "dhcp", + "has_a_record": True, + "description": "FixedAddressMAC", + "has_fixed_address": True, + "mac_address": "52:1f:83:d4:9a:2e", + "fixed_address_comment": "Created From FA MAC", } inf_ds_ipaddress = self.infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_address_atrs)) self.infoblox_adapter.add(inf_ds_ipaddress) + inf_arecord_atrs = { + "dns_name": "server1.nautobot.local.net", + "ref": "record:a/xyz", + } + inf_ds_arecord = self.infoblox_adapter.dnsarecord(**_get_dns_a_record_dict(inf_arecord_atrs)) + self.infoblox_adapter.add(inf_ds_arecord) Prefix.objects.get_or_create( prefix="10.0.0.0/8", @@ -242,6 +430,8 @@ def test_ip_address_create_address_from_dns_record(self): namespace=self.namespace_dev, ) + self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD + self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS nb_adapter = NautobotAdapter(config=self.config) nb_adapter.job = Mock() nb_adapter.load() @@ -252,29 +442,90 @@ def test_ip_address_create_address_from_dns_record(self): self.assertEqual("dev", ipaddress.parent.namespace.name) self.assertEqual("Active", ipaddress.status.name) self.assertEqual("server1.nautobot.local.net", ipaddress.dns_name) - self.assertEqual("Server1", ipaddress.description) - self.assertEqual("host", ipaddress.type) + self.assertEqual("FixedAddressMAC", ipaddress.description) + self.assertEqual("dhcp", ipaddress.type) + self.assertEqual("52:1f:83:d4:9a:2e", ipaddress.custom_field_data.get("mac_address")) + self.assertEqual("Created From FA MAC", ipaddress.custom_field_data.get("fixed_address_comment")) + self.assertEqual("Test A Record", ipaddress.custom_field_data.get("dns_a_record_comment")) self.assertIn(self.tag_sync_from_infoblox, ipaddress.tags.all()) - def test_ip_address_create_address_from_fixed_address_mac_and_dns_record(self): - """Validate ip address gets created from Infoblox Fixed Address MAC + A host record. - Fixed address name takes precedence and is recorded in the description field of Nautobot IP Address. 
- """ + def test_ip_address_create_from_fixed_address_mac_and_dns_a_ptr_records(self): + """Validate ip address gets created from Infoblox Fixed Address MAC and updated with DNS A and PTR records data.""" inf_network_atrs = {"network_type": "network", "namespace": "dev"} inf_ds_network = self.infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs)) self.infoblox_adapter.add(inf_ds_network) inf_address_atrs = { "ip_addr_type": "dhcp", "has_a_record": True, + "description": "FixedAddressMAC", + "has_fixed_address": True, + "mac_address": "52:1f:83:d4:9a:2e", + "fixed_address_comment": "Created From FA MAC", + } + inf_ds_ipaddress = self.infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_address_atrs)) + self.infoblox_adapter.add(inf_ds_ipaddress) + inf_arecord_atrs = { "dns_name": "server1.nautobot.local.net", - "description": "Server1", + "ref": "record:a/xyz", + } + inf_ds_arecord = self.infoblox_adapter.dnsarecord(**_get_dns_a_record_dict(inf_arecord_atrs)) + self.infoblox_adapter.add(inf_ds_arecord) + inf_ptrrecord_atrs = { + "dns_name": "server1.nautobot.local.net", + "ref": "record:ptr/xyz", + } + inf_ds_ptrrecord = self.infoblox_adapter.dnsptrrecord(**_get_dns_ptr_record_dict(inf_ptrrecord_atrs)) + self.infoblox_adapter.add(inf_ds_ptrrecord) + + Prefix.objects.get_or_create( + prefix="10.0.0.0/8", + status=self.status_active, + type="network", + description="TestNetwork", + namespace=self.namespace_dev, + ) + + self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD + self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS + nb_adapter = NautobotAdapter(config=self.config) + nb_adapter.job = Mock() + nb_adapter.load() + self.infoblox_adapter.sync_to(nb_adapter) + + ipaddress = IPAddress.objects.get(address="10.0.0.1/8", parent__namespace__name="dev") + self.assertEqual("10.0.0.1/8", str(ipaddress.address)) + self.assertEqual("dev", ipaddress.parent.namespace.name) + self.assertEqual("Active", ipaddress.status.name) + self.assertEqual("server1.nautobot.local.net", ipaddress.dns_name) + self.assertEqual("FixedAddressMAC", ipaddress.description) + self.assertEqual("dhcp", ipaddress.type) + self.assertEqual("52:1f:83:d4:9a:2e", ipaddress.custom_field_data.get("mac_address")) + self.assertEqual("Created From FA MAC", ipaddress.custom_field_data.get("fixed_address_comment")) + self.assertEqual("Test A Record", ipaddress.custom_field_data.get("dns_a_record_comment")) + self.assertEqual("Test PTR Record", ipaddress.custom_field_data.get("dns_ptr_record_comment")) + self.assertIn(self.tag_sync_from_infoblox, ipaddress.tags.all()) + + def test_ip_address_create_from_fixed_address_mac_and_dns_host_record(self): + """Validate ip address gets created from Infoblox Fixed Address MAC and updated with DNS host record data.""" + inf_network_atrs = {"network_type": "network", "namespace": "dev"} + inf_ds_network = self.infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs)) + self.infoblox_adapter.add(inf_ds_network) + inf_address_atrs = { + "ip_addr_type": "dhcp", + "has_a_record": True, + "description": "FixedAddressMAC", "has_fixed_address": True, "mac_address": "52:1f:83:d4:9a:2e", - "fixed_address_name": "FixedAddressMAC", "fixed_address_comment": "Created From FA MAC", } inf_ds_ipaddress = self.infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_address_atrs)) self.infoblox_adapter.add(inf_ds_ipaddress) + inf_hostrecord_atrs = { + "dns_name": "server1.nautobot.local.net", + "ref": "record:host/xyz", + } + inf_ds_hostrecord = 
self.infoblox_adapter.dnshostrecord(**_get_dns_host_record_dict(inf_hostrecord_atrs)) + self.infoblox_adapter.add(inf_ds_hostrecord) Prefix.objects.get_or_create( prefix="10.0.0.0/8", @@ -284,6 +535,8 @@ def test_ip_address_create_address_from_fixed_address_mac_and_dns_record(self): namespace=self.namespace_dev, ) + self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD + self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS nb_adapter = NautobotAdapter(config=self.config) nb_adapter.job = Mock() nb_adapter.load() @@ -298,13 +551,14 @@ def test_ip_address_create_address_from_fixed_address_mac_and_dns_record(self): self.assertEqual("dhcp", ipaddress.type) self.assertEqual("52:1f:83:d4:9a:2e", ipaddress.custom_field_data.get("mac_address")) self.assertEqual("Created From FA MAC", ipaddress.custom_field_data.get("fixed_address_comment")) + self.assertEqual("Test Host Record", ipaddress.custom_field_data.get("dns_host_record_comment")) self.assertIn(self.tag_sync_from_infoblox, ipaddress.tags.all()) ############ # IP Address updates ########### - def test_ip_address_update_address_from_fixed_address_reserved(self): + def test_ip_address_update_from_fixed_address_reserved(self): """Validate ip address gets updated from Infoblox fixed address reservation.""" inf_network_atrs = {"network_type": "network", "namespace": "dev"} inf_ds_network = self.infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs)) @@ -312,8 +566,8 @@ def test_ip_address_update_address_from_fixed_address_reserved(self): inf_address_atrs = { "ip_addr_type": "dhcp", "has_fixed_address": True, - "fixed_address_name": "FixedAddressMAC", - "fixed_address_comment": "Created From FA MAC", + "description": "FixedAddressReserved", + "fixed_address_comment": "Created From FA Reserved", "ext_attrs": {"gateway": "10.0.0.254"}, } inf_ds_ipaddress = self.infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_address_atrs)) @@ -334,6 +588,7 @@ def test_ip_address_update_address_from_fixed_address_reserved(self): parent=parent_pfx, ) + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED nb_adapter = NautobotAdapter(config=self.config) nb_adapter.job = Mock() nb_adapter.load() @@ -344,9 +599,9 @@ def test_ip_address_update_address_from_fixed_address_reserved(self): self.assertEqual("10.0.0.1/8", str(ipaddress.address)) self.assertEqual("dev", ipaddress.parent.namespace.name) self.assertEqual("Active", ipaddress.status.name) - self.assertEqual("FixedAddressMAC", ipaddress.description) + self.assertEqual("FixedAddressReserved", ipaddress.description) self.assertEqual("dhcp", ipaddress.type) - self.assertEqual("Created From FA MAC", ipaddress.custom_field_data.get("fixed_address_comment")) + self.assertEqual("Created From FA Reserved", ipaddress.custom_field_data.get("fixed_address_comment")) self.assertEqual("10.0.0.254", ipaddress.custom_field_data.get("gateway")) def test_ip_address_update_address_from_fixed_address_mac(self): @@ -358,7 +613,7 @@ def test_ip_address_update_address_from_fixed_address_mac(self): "ip_addr_type": "dhcp", "has_fixed_address": True, "mac_address": "52:1f:83:d4:9a:2e", - "fixed_address_name": "FixedAddressMAC", + "description": "FixedAddressMAC", "fixed_address_comment": "Created From FA MAC", "ext_attrs": {"gateway": "10.0.0.254"}, } @@ -382,6 +637,8 @@ def test_ip_address_update_address_from_fixed_address_mac(self): "_custom_field_data": {"mac_address": "52:1f:83:d4:9a:2a"}, }, ) + + self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS nb_adapter = 
NautobotAdapter(config=self.config) nb_adapter.job = Mock() nb_adapter.load() @@ -397,16 +654,23 @@ self.assertEqual("52:1f:83:d4:9a:2e", ipaddress.custom_field_data.get("mac_address")) self.assertEqual("Created From FA MAC", ipaddress.custom_field_data.get("fixed_address_comment")) - def test_ip_address_update_address_from_dns_record(self): - """Validate ip address gets created from Infoblox DNS record.""" + def test_ip_address_update_address_from_dns_a_record(self): + """Validate ip address gets updated from Infoblox DNS A record.""" inf_network_atrs = {"network_type": "network", "namespace": "dev"} inf_ds_network = self.infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs)) self.infoblox_adapter.add(inf_ds_network) - inf_address_atrs = { - "ip_addr_type": "host", - "has_a_record": True, + inf_arecord_atrs = { "dns_name": "server1.nautobot.local.net", - "description": "Server1", + "ref": "record:a/xyz", + } + inf_ds_arecord = self.infoblox_adapter.dnsarecord(**_get_dns_a_record_dict(inf_arecord_atrs)) + self.infoblox_adapter.add(inf_ds_arecord) + inf_address_atrs = { + "ip_addr_type": "dhcp", + "has_fixed_address": True, + "description": "FixedAddressReserved", + "fixed_address_comment": "Created From FA Reserved", + "ext_attrs": {"gateway": "10.0.0.254"}, } inf_ds_ipaddress = self.infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_address_atrs)) self.infoblox_adapter.add(inf_ds_ipaddress) @@ -426,9 +690,16 @@ defaults={ "dns_name": "server.nautobot.local.net", "description": "OldDescription", - "_custom_field_data": {"mac_address": "52:1f:83:d4:9a:2a"}, + "_custom_field_data": { + "mac_address": "52:1f:83:d4:9a:2a", + "fixed_address_comment": "Old FA comment", + "dns_a_record_comment": "Old A record comment", + }, }, ) + + self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED nb_adapter = NautobotAdapter(config=self.config) nb_adapter.job = Mock() nb_adapter.load() @@ -439,6 +710,256 @@ self.assertEqual("10.0.0.1/8", str(ipaddress.address)) self.assertEqual("dev", ipaddress.parent.namespace.name) self.assertEqual("Active", ipaddress.status.name) - self.assertEqual("Server1", ipaddress.description) + self.assertEqual("FixedAddressReserved", ipaddress.description) self.assertEqual("server1.nautobot.local.net", ipaddress.dns_name) - self.assertEqual("host", ipaddress.type) + self.assertEqual("Created From FA Reserved", ipaddress.custom_field_data.get("fixed_address_comment")) + self.assertEqual("Test A Record", ipaddress.custom_field_data.get("dns_a_record_comment")) + self.assertEqual("dhcp", ipaddress.type) + + ############ + # IP Address deletes + ########### + + def test_ip_address_delete_fail(self): + """Validate ip address is not deleted if object deletion is not enabled in the config.""" + inf_network_atrs = {"network_type": "network", "namespace": "dev"} + inf_ds_network = self.infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs)) + self.infoblox_adapter.add(inf_ds_network) + + parent_pfx, _ = Prefix.objects.get_or_create( + prefix="10.0.0.0/8", + status=self.status_active, + type="network", + description="TestNetwork", + namespace=self.namespace_dev, + ) + IPAddress.objects.get_or_create( + address="10.0.0.1/8", + status=self.status_active, + type="dhcp", + parent=parent_pfx, + defaults={ + "description":
"OldDescription", + "_custom_field_data": { + "mac_address": "52:1f:83:d4:9a:2a", + "fixed_address_comment": "Old FA comment", + }, + }, + ) + + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.nautobot_deletable_models = [] + nb_adapter = NautobotAdapter(config=self.config) + nb_adapter.job = Mock() + nb_adapter.load() + self.infoblox_adapter.sync_to(nb_adapter) + + ipaddress = IPAddress.objects.get(address="10.0.0.1/8", parent__namespace__name="dev") + + self.assertEqual("10.0.0.1/8", str(ipaddress.address)) + self.assertEqual("dev", ipaddress.parent.namespace.name) + self.assertEqual("Active", ipaddress.status.name) + self.assertEqual("OldDescription", ipaddress.description) + self.assertEqual("dhcp", ipaddress.type) + self.assertEqual("Old FA comment", ipaddress.custom_field_data.get("fixed_address_comment")) + + def test_ip_address_delete_success(self): + """Validate ip address is deleted if object deletion is enabled in the config.""" + inf_network_atrs = {"network_type": "network", "namespace": "dev"} + inf_ds_network = self.infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs)) + self.infoblox_adapter.add(inf_ds_network) + + parent_pfx, _ = Prefix.objects.get_or_create( + prefix="10.0.0.0/8", + status=self.status_active, + type="network", + description="TestNetwork", + namespace=self.namespace_dev, + ) + IPAddress.objects.get_or_create( + address="10.0.0.1/8", + status=self.status_active, + type="host", + description="OldDescription", + parent=parent_pfx, + ) + + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.nautobot_deletable_models = [NautobotDeletableModelChoices.IP_ADDRESS] + nb_adapter = NautobotAdapter(config=self.config) + nb_adapter.job = Mock() + nb_adapter.load() + self.infoblox_adapter.sync_to(nb_adapter) + + with self.assertRaises(IPAddress.DoesNotExist): + IPAddress.objects.get(address="10.0.0.1/8", parent__namespace__name="dev") + + def test_ip_address_delete_a_record(self): + """Validate A record data for ip address is deleted if object deletion is enabled in the config.""" + inf_network_atrs = {"network_type": "network", "namespace": "dev"} + inf_ds_network = self.infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs)) + self.infoblox_adapter.add(inf_ds_network) + inf_address_atrs = { + "ip_addr_type": "dhcp", + "has_fixed_address": True, + "description": "FixedAddressReserved", + "fixed_address_comment": "Created From FA Reserved", + } + inf_ds_ipaddress = self.infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_address_atrs)) + self.infoblox_adapter.add(inf_ds_ipaddress) + + parent_pfx, _ = Prefix.objects.get_or_create( + prefix="10.0.0.0/8", + status=self.status_active, + type="network", + description="TestNetwork", + namespace=self.namespace_dev, + ) + IPAddress.objects.get_or_create( + address="10.0.0.1/8", + status=self.status_active, + type="dhcp", + parent=parent_pfx, + defaults={ + "description": "FixedAddressReserved", + "dns_name": "server1.nautobot.local.net", + "_custom_field_data": { + "fixed_address_comment": "Created From FA Reserved", + "dns_a_record_comment": "Created From A Record", + }, + }, + ) + + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD + self.config.nautobot_deletable_models = [NautobotDeletableModelChoices.DNS_A_RECORD] + nb_adapter = NautobotAdapter(config=self.config) + nb_adapter.job = Mock() + nb_adapter.load() + self.infoblox_adapter.sync_to(nb_adapter) + + ipaddress = 
IPAddress.objects.get(address="10.0.0.1/8", parent__namespace__name="dev") + + self.assertEqual("10.0.0.1/8", str(ipaddress.address)) + self.assertEqual("dev", ipaddress.parent.namespace.name) + self.assertEqual("Active", ipaddress.status.name) + self.assertEqual("FixedAddressReserved", ipaddress.description) + self.assertEqual("", ipaddress.dns_name) + self.assertEqual("Created From FA Reserved", ipaddress.custom_field_data.get("fixed_address_comment")) + self.assertEqual("", ipaddress.custom_field_data.get("dns_a_record_comment")) + self.assertEqual("dhcp", ipaddress.type) + + def test_ip_address_delete_host_record(self): + """Validate Host record data for ip address is deleted if object deletion is enabled in the config.""" + inf_network_atrs = {"network_type": "network", "namespace": "dev"} + inf_ds_network = self.infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs)) + self.infoblox_adapter.add(inf_ds_network) + inf_address_atrs = { + "ip_addr_type": "dhcp", + "has_fixed_address": True, + "description": "FixedAddressReserved", + "fixed_address_comment": "Created From FA Reserved", + } + inf_ds_ipaddress = self.infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_address_atrs)) + self.infoblox_adapter.add(inf_ds_ipaddress) + + parent_pfx, _ = Prefix.objects.get_or_create( + prefix="10.0.0.0/8", + status=self.status_active, + type="network", + description="TestNetwork", + namespace=self.namespace_dev, + ) + IPAddress.objects.get_or_create( + address="10.0.0.1/8", + status=self.status_active, + type="dhcp", + parent=parent_pfx, + defaults={ + "description": "FixedAddressReserved", + "dns_name": "server1.nautobot.local.net", + "_custom_field_data": { + "fixed_address_comment": "Created From FA Reserved", + "dns_host_record_comment": "Created From Host Record", + }, + }, + ) + + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.dns_record_type = DNSRecordTypeChoices.HOST_RECORD + self.config.nautobot_deletable_models = [NautobotDeletableModelChoices.DNS_HOST_RECORD] + nb_adapter = NautobotAdapter(config=self.config) + nb_adapter.job = Mock() + nb_adapter.load() + self.infoblox_adapter.sync_to(nb_adapter) + + ipaddress = IPAddress.objects.get(address="10.0.0.1/8", parent__namespace__name="dev") + + self.assertEqual("10.0.0.1/8", str(ipaddress.address)) + self.assertEqual("dev", ipaddress.parent.namespace.name) + self.assertEqual("Active", ipaddress.status.name) + self.assertEqual("FixedAddressReserved", ipaddress.description) + self.assertEqual("", ipaddress.dns_name) + self.assertEqual("Created From FA Reserved", ipaddress.custom_field_data.get("fixed_address_comment")) + self.assertEqual("", ipaddress.custom_field_data.get("dns_host_record_comment")) + self.assertEqual("dhcp", ipaddress.type) + + def test_ip_address_delete_a_ptr_records(self): + """Validate A and PTR record data for ip address is deleted if object deletion is enabled in the config.""" + inf_network_atrs = {"network_type": "network", "namespace": "dev"} + inf_ds_network = self.infoblox_adapter.prefix(**_get_network_dict(inf_network_atrs)) + self.infoblox_adapter.add(inf_ds_network) + inf_address_atrs = { + "ip_addr_type": "dhcp", + "has_fixed_address": True, + "description": "FixedAddressReserved", + "fixed_address_comment": "Created From FA Reserved", + } + inf_ds_ipaddress = self.infoblox_adapter.ipaddress(**_get_ip_address_dict(inf_address_atrs)) + self.infoblox_adapter.add(inf_ds_ipaddress) + + parent_pfx, _ = Prefix.objects.get_or_create( + prefix="10.0.0.0/8", + 
status=self.status_active, + type="network", + description="TestNetwork", + namespace=self.namespace_dev, + ) + IPAddress.objects.get_or_create( + address="10.0.0.1/8", + status=self.status_active, + type="dhcp", + parent=parent_pfx, + defaults={ + "description": "FixedAddressReserved", + "dns_name": "server1.nautobot.local.net", + "_custom_field_data": { + "fixed_address_comment": "Created From FA Reserved", + "dns_a_record_comment": "Created From A Record", + "dns_ptr_record_comment": "Created From PTR Record", + }, + }, + ) + + self.config.fixed_address_type = FixedAddressTypeChoices.RESERVED + self.config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD + self.config.nautobot_deletable_models = [ + NautobotDeletableModelChoices.DNS_A_RECORD, + NautobotDeletableModelChoices.DNS_PTR_RECORD, + ] + nb_adapter = NautobotAdapter(config=self.config) + nb_adapter.job = Mock() + nb_adapter.load() + self.infoblox_adapter.sync_to(nb_adapter) + + ipaddress = IPAddress.objects.get(address="10.0.0.1/8", parent__namespace__name="dev") + + self.assertEqual("10.0.0.1/8", str(ipaddress.address)) + self.assertEqual("dev", ipaddress.parent.namespace.name) + self.assertEqual("Active", ipaddress.status.name) + self.assertEqual("FixedAddressReserved", ipaddress.description) + self.assertEqual("", ipaddress.dns_name) + self.assertEqual("Created From FA Reserved", ipaddress.custom_field_data.get("fixed_address_comment")) + self.assertEqual("", ipaddress.custom_field_data.get("dns_a_record_comment")) + self.assertEqual("", ipaddress.custom_field_data.get("dns_ptr_record_comment")) + self.assertEqual("dhcp", ipaddress.type) diff --git a/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py b/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py index 9bf2d1193..f4113c490 100644 --- a/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py +++ b/nautobot_ssot/tests/infoblox/test_tags_and_cfs.py @@ -9,6 +9,7 @@ from nautobot.extras.models import CustomField, Status, Tag from nautobot.ipam.models import VLAN, IPAddress, Namespace, Prefix, VLANGroup +from nautobot_ssot.integrations.infoblox.choices import DNSRecordTypeChoices, FixedAddressTypeChoices from nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox import InfobloxAdapter from nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot import NautobotAdapter from nautobot_ssot.tests.infoblox.fixtures_infoblox import create_prefix_relationship @@ -55,6 +56,8 @@ def test_tags_have_correct_content_types_set(self): def test_objects_synced_from_infoblox_are_tagged(self): """Ensure objects synced from Infoblox have 'SSoT Synced from Infoblox' tag applied.""" + self.config.dns_record_type = DNSRecordTypeChoices.A_RECORD + self.config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS nautobot_adapter = NautobotAdapter(config=self.config) nautobot_adapter.job = Mock() nautobot_adapter.load() @@ -87,10 +90,10 @@ def test_objects_synced_from_infoblox_are_tagged(self): description="Test IPAddress", address="10.0.0.1", status="Active", - dns_name="", + has_fixed_address=True, prefix="10.0.0.0/8", prefix_length=8, - ip_addr_type="host", + ip_addr_type="dhcp", ext_attrs={}, namespace="Global", ) From 36d9cade26582febde8fa1205befa51b519b5778 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 17 Jun 2024 16:19:59 +0100 Subject: [PATCH 159/229] Linting, docs and minor changes. 
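The create, update, and delete tests above all repeat one arrangement: seed the Infoblox adapter with DiffSync models, seed the database, set the relevant `fixed_address_type`/`dns_record_type` choices on the config, then load the Nautobot adapter and sync. A condensed sketch of that arrangement follows; the fixture and adapter imports come from the modules shown in these diffs, while the concrete choice values are illustrative only.

```python
# Condensed sketch of the test pattern used in the suites above (assumes the
# repo's test fixtures and a database, as in the actual TestCase classes).
from unittest.mock import Mock

from nautobot_ssot.integrations.infoblox.choices import DNSRecordTypeChoices, FixedAddressTypeChoices
from nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox import InfobloxAdapter
from nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot import NautobotAdapter
from nautobot_ssot.tests.infoblox.fixtures_infoblox import create_default_infoblox_config

config = create_default_infoblox_config()
config.fixed_address_type = FixedAddressTypeChoices.MAC_ADDRESS
config.dns_record_type = DNSRecordTypeChoices.A_AND_PTR_RECORD

infoblox_adapter = InfobloxAdapter(conn=Mock(), config=config)  # Infoblox API client is mocked in the tests
infoblox_adapter.job = Mock()
# ...add InfobloxIPAddress / DNS record DiffSync models to infoblox_adapter here...

nb_adapter = NautobotAdapter(config=config)
nb_adapter.job = Mock()
nb_adapter.load()                     # read current state from the Nautobot database
infoblox_adapter.sync_to(nb_adapter)  # apply the Infoblox-side state to Nautobot
```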
--- .../integrations/infoblox/choices.py | 10 ++++-- .../infoblox/diffsync/adapters/infoblox.py | 6 ++-- .../infoblox/diffsync/models/__init__.py | 25 +++++++------ .../infoblox/diffsync/models/base.py | 1 + .../infoblox/diffsync/models/infoblox.py | 9 ++--- .../infoblox/diffsync/models/nautobot.py | 2 +- nautobot_ssot/integrations/infoblox/forms.py | 1 + .../integrations/infoblox/utils/client.py | 35 +++++++------------ 8 files changed, 40 insertions(+), 49 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/choices.py b/nautobot_ssot/integrations/infoblox/choices.py index 30e3f5b07..35be04cb6 100644 --- a/nautobot_ssot/integrations/infoblox/choices.py +++ b/nautobot_ssot/integrations/infoblox/choices.py @@ -45,7 +45,10 @@ class DNSRecordTypeChoices(ChoiceSet): class InfobloxDeletableModelChoices(ChoiceSet): - """Choiceset used by SSOTInfobloxConfig.""" + """Choiceset used by SSOTInfobloxConfig. + + These choices specify types of records that can be allowed to be deleted in Infoblox. + """ DNS_A_RECORD = "dns-a-record" DNS_HOST_RECORD = "dns-host-record" @@ -61,7 +64,10 @@ class InfobloxDeletableModelChoices(ChoiceSet): class NautobotDeletableModelChoices(ChoiceSet): - """Choiceset used by SSOTInfobloxConfig.""" + """Choiceset used by SSOTInfobloxConfig. + + These choices specify types of records that can be allowed to be deleted in Nautobot. + """ DNS_A_RECORD = "dns-a-record" DNS_HOST_RECORD = "dns-host-record" diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py index e8cae6842..36f1efca1 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py @@ -10,8 +10,8 @@ from nautobot_ssot.integrations.infoblox.choices import FixedAddressTypeChoices from nautobot_ssot.integrations.infoblox.diffsync.models.infoblox import ( - InfobloxDnsHostRecord, InfobloxDnsARecord, + InfobloxDnsHostRecord, InfobloxDnsPTRRecord, InfobloxIPAddress, InfobloxNamespace, @@ -256,7 +256,7 @@ def load_ipaddresses(self): # pylint: disable=too-many-branches,too-many-locals elif obj_type == "record:ptr": new_ip.has_ptr_record = True ptr_record_ref = ref - # We currently only support RESERVED and MAC_ADDRESS types for fixed address + # We currently only support RESERVED and MAC_ADDRESS types for fixed address objects. 
elif obj_type == "fixedaddress": if "RESERVATION" in _ip["types"]: new_ip.fixed_address_type = "RESERVED" @@ -288,7 +288,7 @@ def load_ipaddresses(self): # pylint: disable=too-many-branches,too-many-locals if new_ip.has_ptr_record: self._load_dns_ptr_record_for_ip(ref=ptr_record_ref, ip_record=new_ip, namespace=namespace) - if new_ip.has_fixed_address or new_ip.has_a_record or new_ip.has_ptr_record: + if new_ip.has_fixed_address or new_ip.has_a_record or new_ip.has_host_record: self.add(new_ip) def _load_dns_host_record_for_ip(self, ref: str, ip_record: object, namespace: str): diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py b/nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py index bbe411500..7bfda1de1 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/__init__.py @@ -1,26 +1,25 @@ """Initialize models for Nautobot and Infoblox.""" -from .nautobot import ( - NautobotDnsARecord, - NautobotDnsHostRecord, - NautobotDnsPTRRecord, - NautobotNamespace, - NautobotNetwork, - NautobotIPAddress, - NautobotVlanGroup, - NautobotVlan, -) from .infoblox import ( InfobloxDnsARecord, InfobloxDnsHostRecord, InfobloxDnsPTRRecord, + InfobloxIPAddress, InfobloxNamespace, InfobloxNetwork, - InfobloxIPAddress, - InfobloxVLANView, InfobloxVLAN, + InfobloxVLANView, +) +from .nautobot import ( + NautobotDnsARecord, + NautobotDnsHostRecord, + NautobotDnsPTRRecord, + NautobotIPAddress, + NautobotNamespace, + NautobotNetwork, + NautobotVlan, + NautobotVlanGroup, ) - __all__ = [ "NautobotDnsARecord", diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/base.py b/nautobot_ssot/integrations/infoblox/diffsync/models/base.py index 00125297a..628580399 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/base.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/base.py @@ -2,6 +2,7 @@ import uuid from typing import Optional + from diffsync import DiffSyncModel diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py index 2d9c4a63e..8c5523ffb 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/infoblox.py @@ -85,13 +85,8 @@ class InfobloxIPAddress(IPAddress): """Infoblox implementation of the VLAN Model.""" @classmethod - def create(cls, diffsync, ids, attrs): # pylint: disable=too-many-branches - """Creates IP Address Reservation. Additionally create DNS Host record or an A record. - - Optionally creates a PTR record in addition to an A record. 
- - DNS record creation requires the IP Address to have a DNS name - """ + def create(cls, diffsync, ids, attrs): + """Creates Fixed Address record.""" network_view = map_network_view_to_namespace(value=ids["namespace"], direction="ns_to_nv") ip_address = ids["address"] mac_address = attrs.get("mac_address") diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py b/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py index 07145bb20..907901c3d 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py @@ -14,8 +14,8 @@ from nautobot.ipam.models import VLANGroup as OrmVlanGroup from nautobot_ssot.integrations.infoblox.choices import ( - FixedAddressTypeChoices, DNSRecordTypeChoices, + FixedAddressTypeChoices, NautobotDeletableModelChoices, ) from nautobot_ssot.integrations.infoblox.diffsync.models.base import ( diff --git a/nautobot_ssot/integrations/infoblox/forms.py b/nautobot_ssot/integrations/infoblox/forms.py index ae4566824..f10733e60 100644 --- a/nautobot_ssot/integrations/infoblox/forms.py +++ b/nautobot_ssot/integrations/infoblox/forms.py @@ -39,6 +39,7 @@ class SSOTInfobloxConfigForm(NautobotModelForm): # pylint: disable=too-many-anc dns_record_type = forms.ChoiceField( choices=DNSRecordTypeChoices, required=True, + label="DNS record type", widget=StaticSelect2(), ) infoblox_deletable_models = forms.MultipleChoiceField( diff --git a/nautobot_ssot/integrations/infoblox/utils/client.py b/nautobot_ssot/integrations/infoblox/utils/client.py index e72e392bd..0a7459d76 100644 --- a/nautobot_ssot/integrations/infoblox/utils/client.py +++ b/nautobot_ssot/integrations/infoblox/utils/client.py @@ -1007,9 +1007,6 @@ def get_all_dns_views(self): def create_a_record(self, fqdn, ip_address, comment: Optional[str] = None, network_view: Optional[str] = None): """Create an A record for a given FQDN. - Please note: This API call with work only for host records that do not have an associated a record. - If an a record already exists, this will return a 400 error. - Args: network_view (str): Name of the network view, e.g. 'dev' @@ -1252,7 +1249,7 @@ def get_all_subnets(self, prefix: str = None, ipv6: bool = False, network_view: return json_response def get_authoritative_zone(self, network_view: Optional[str] = None): - """Get authoritative zones to check if FQDN exists. + """Get authoritative zones. Args: network_view (str): Name of the network view, e.g. 'dev' @@ -1525,16 +1522,15 @@ def update_fixed_address(self, ref, data): """Update a fixed ip address within Infoblox. Args: - ref (str): Reference to Host record + ref (str): Reference to fixed address record Returns: Dict: Dictionary of _ref and name Return Response: { - - "_ref": "record:host/ZG5zLmhvc3QkLjEuY29tLmluZm9ibG94Lmhvc3Q:host.infoblox.com/default.test", - "name": "host.infoblox.com", + "_ref": "fixedaddress/ZG5zLmZpeGVkX2FkZHJlc3MkMTAuMjIwLjAuMy4wLi4:10.220.0.3/default", + "ipv4addr": "10.220.0.3" } """ params = {} @@ -1554,9 +1550,6 @@ def update_fixed_address(self, ref, data): def create_host_record(self, fqdn, ip_address, comment: Optional[str] = None, network_view: Optional[str] = None): """Create a host record for a given FQDN. - Please note: This API call with work only for host records that do not have an associated a record. - If an a record already exists, this will return a 400 error. - Args: network_view (str): Name of the network view, e.g. 
'dev' @@ -1593,8 +1586,6 @@ def update_host_record(self, ref, data): """Update a host record for a given FQDN. - Please note: This API call should only be used for host records that do not have an associated A record. - Args: ref (str): Reference to Host record @@ -1623,7 +1614,7 @@ def update_host_record(self, ref, data): return response.text def delete_host_record(self, ip_address, network_view: Optional[str] = None): - """Delete provided IP Address from Infoblox. + """Delete host record for provided IP Address from Infoblox. Args: network_view (str): Name of the network view, e.g. 'dev' @@ -1971,10 +1962,10 @@ def update_ptr_record(self, ref, data): # pylint: disable=inconsistent-return-s logger.debug(data) response = self._request("PUT", path=ref, params=params, json=data) except HTTPError as err: - logger.error("Could not update PTR address: %s for ref %s", err.response.text, ref) + logger.error("Could not update DNS PTR record: %s for ref %s", err.response.text, ref) return None try: - logger.debug("Infoblox PTR Address updated: %s", response.json()) + logger.debug("Infoblox DNS PTR record updated: %s", response.json()) results = response.json() return results except json.decoder.JSONDecodeError: @@ -2004,17 +1995,16 @@ def update_a_record(self, ref, data): # pylint: disable=inconsistent-return-sta logger.debug(data) response = self._request("PUT", path=ref, params=params, json=data) except HTTPError as err: - logger.error("Could not update DNS address: %s for ref %s", err.response.text, ref) + logger.error("Could not update DNS A record: %s for ref %s", err.response.text, ref) return None try: - logger.debug("Infoblox DNS Address updated: %s", response.json()) + logger.debug("Infoblox DNS A record updated: %s", response.json()) results = response.json() return results except json.decoder.JSONDecodeError: logger.error(response.text) return response.text - # Perhaps make multiple searches, or go through types returned by the search def update_ipaddress( self, ip_address, @@ -2037,8 +2027,6 @@ def update_ipaddress( "ipv4addr": "10.220.0.3" } """ - # resources = self._find_matching_resources("search", search_string=ip_address, objtype="fixedaddress") - # resources.extend(self._find_matching_resources("search", search_string=ip_address, objtype="record:host")) resources = self._find_matching_resources("search", address=ip_address) if not resources: return None @@ -2388,5 +2376,6 @@ def get_default_dns_view_for_network_view(self, network_view: str): _network_view = self.get_network_view(network_view) if _network_view and "associated_dns_views" in _network_view[0]: return _network_view[0]["associated_dns_views"][0] + # There is no easy way to recover if the network view is somehow missing associated dns views. + # This should only really happen if there's no network view for the provided name. - logger.debug(_network_view) - return None + raise ValueError(f"Error retrieving the default DNS View for Network View {network_view}.") From 27178d6b8c790aad3a064735eb3b74f07a073350 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 17 Jun 2024 16:22:01 +0100 Subject: [PATCH 160/229] Add toggle for enabling config for sync to Nautobot.
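The commit below threads the new toggle through the job signature, the SSOTInfobloxConfig model, the bootstrap signal, the config table, and the templates. Its effect on job launch can be pictured roughly as follows; the query shown is an illustration built from field names in the patch, not code that appears in it.

```python
# Rough illustration of the gating this commit introduces: a config must be
# enabled for sync jobs in general and for the Infoblox -> Nautobot direction
# in particular before the data source job will run with it.
from nautobot_ssot.integrations.infoblox.models import SSOTInfobloxConfig

eligible_configs = SSOTInfobloxConfig.objects.filter(
    job_enabled=True,              # "Enabled for Sync Job"
    enable_sync_to_nautobot=True,  # the toggle introduced here
)
if not eligible_configs.exists():
    raise ValueError("Config not enabled for sync to Nautobot.")
```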
--- nautobot_ssot/integrations/infoblox/jobs.py | 6 ++- nautobot_ssot/integrations/infoblox/models.py | 44 ++++++++++++++++--- .../integrations/infoblox/signals.py | 1 + nautobot_ssot/integrations/infoblox/tables.py | 1 + .../0009_ssotconfig_ssotinfobloxconfig.py | 3 +- .../ssotinfobloxconfig_retrieve.html | 4 ++ .../ssotinfobloxconfig_update.html | 1 + 7 files changed, 53 insertions(+), 7 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/jobs.py b/nautobot_ssot/integrations/infoblox/jobs.py index a4e3fb443..156dcf2f9 100644 --- a/nautobot_ssot/integrations/infoblox/jobs.py +++ b/nautobot_ssot/integrations/infoblox/jobs.py @@ -49,6 +49,7 @@ class InfobloxDataSource(DataSource): display_field="SSOT Infoblox config", required=True, query_params={ + "enable_sync_to_nautobot": True, "job_enabled": True, }, ) @@ -98,6 +99,9 @@ def run(self, dryrun, memory_profiling, debug, *args, **kwargs): # pylint: disa self.debug = debug self.dryrun = dryrun self.config = kwargs.get("config") + if not self.config.enable_sync_to_nautobot: + self.logger.error("Can't run sync to Nautobot, provided config doesn't have it enabled...") + raise ValueError("Config not enabled for sync to Nautobot.") self.memory_profiling = memory_profiling super().run(dryrun=self.dryrun, memory_profiling=self.memory_profiling, *args, **kwargs) @@ -164,7 +168,7 @@ def run(self, dryrun, memory_profiling, debug, *args, **kwargs): # pylint: disa # Additional guard against launching sync to Infoblox with config that doesn't allow it if not self.config.enable_sync_to_infoblox: self.logger.error("Can't run sync to Infoblox, provided config doesn't have it enabled...") - return + raise ValueError("Config not enabled for sync to Infoblox.") self.memory_profiling = memory_profiling super().run(dryrun=self.dryrun, memory_profiling=self.memory_profiling, *args, **kwargs) diff --git a/nautobot_ssot/integrations/infoblox/models.py b/nautobot_ssot/integrations/infoblox/models.py index 8838ac86f..7cf74891e 100644 --- a/nautobot_ssot/integrations/infoblox/models.py +++ b/nautobot_ssot/integrations/infoblox/models.py @@ -3,7 +3,6 @@ import ipaddress from django.core.exceptions import ValidationError - from django.core.serializers.json import DjangoJSONEncoder from django.db import models @@ -16,7 +15,12 @@ from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.models import SecretsGroupAssociation -from nautobot_ssot.integrations.infoblox.choices import FixedAddressTypeChoices, DNSRecordTypeChoices +from nautobot_ssot.integrations.infoblox.choices import ( + DNSRecordTypeChoices, + FixedAddressTypeChoices, + InfobloxDeletableModelChoices, + NautobotDeletableModelChoices, +) def _get_default_sync_filters(): @@ -57,6 +61,9 @@ class SSOTInfobloxConfig(PrimaryModel): # pylint: disable=too-many-ancestors enable_sync_to_infoblox = models.BooleanField( default=False, verbose_name="Sync to Infoblox", help_text="Enable syncing of data from Nautobot to Infoblox." ) + enable_sync_to_nautobot = models.BooleanField( + default=True, verbose_name="Sync to Nautobot", help_text="Enable syncing of data from Infoblox to Nautobot." 
+ ) import_ip_addresses = models.BooleanField( default=False, verbose_name="Import IP Addresses", @@ -88,21 +95,29 @@ class SSOTInfobloxConfig(PrimaryModel): # pylint: disable=too-many-ancestors max_length=CHARFIELD_MAX_LENGTH, default=DNSRecordTypeChoices.HOST_RECORD, choices=DNSRecordTypeChoices, + verbose_name="DNS record type", help_text="Choose what type of Infoblox DNS record to create for IP Addresses.", ) fixed_address_type = models.CharField( max_length=CHARFIELD_MAX_LENGTH, default=FixedAddressTypeChoices.DONT_CREATE_RECORD, choices=FixedAddressTypeChoices, - help_text="Choose what type of Infoblox fixed IP address record to create.", + help_text="Choose what type of Infoblox fixed IP Address record to create.", ) job_enabled = models.BooleanField( default=False, verbose_name="Enabled for Sync Job", help_text="Enable use of this configuration in the sync jobs.", ) - infoblox_deletable_models = models.JSONField(encoder=DjangoJSONEncoder, default=list, blank=True) - nautobot_deletable_models = models.JSONField(encoder=DjangoJSONEncoder, default=list, blank=True) + infoblox_deletable_models = models.JSONField( + encoder=DjangoJSONEncoder, + default=list, + blank=True, + help_text="Model types that can be deleted in Infoblox.", + ) + nautobot_deletable_models = models.JSONField( + encoder=DjangoJSONEncoder, default=list, blank=True, help_text="Model types that can be deleted in Nautobot." ) class Meta: """Meta class for SSOTInfobloxConfig.""" @@ -265,6 +280,24 @@ def _clean_cf_fields_ignore(self): }, ) + def _clean_deletable_model_types(self): + """Performs validation of infoblox_deletable_models and nautobot_deletable_models.""" + for model in self.infoblox_deletable_models: + if model not in InfobloxDeletableModelChoices.values(): + raise ValidationError( + { + "infoblox_deletable_models": f"Model `{model}` is not a valid choice.", + }, + ) + + for model in self.nautobot_deletable_models: + if model not in NautobotDeletableModelChoices.values(): + raise ValidationError( + { + "nautobot_deletable_models": f"Model `{model}` is not a valid choice.", + }, + ) + def clean(self): """Clean method for SSOTInfobloxConfig.""" super().clean() @@ -274,3 +307,4 @@ def clean(self): self._clean_import_ip() self._clean_infoblox_dns_view_mapping() self._clean_cf_fields_ignore() + self._clean_deletable_model_types() diff --git a/nautobot_ssot/integrations/infoblox/signals.py b/nautobot_ssot/integrations/infoblox/signals.py index 28b8783ed..9fedb522a 100644 --- a/nautobot_ssot/integrations/infoblox/signals.py +++ b/nautobot_ssot/integrations/infoblox/signals.py @@ -207,6 +207,7 @@ def nautobot_database_ready_callback( infoblox_wapi_version=str(config.get("infoblox_wapi_version", "v2.12")), infoblox_instance=external_integration, enable_sync_to_infoblox=bool(config.get("infoblox_enable_sync_to_infoblox", False)), + enable_sync_to_nautobot=True, import_ip_addresses=bool(config.get("infoblox_import_objects_ip_addresses", False)), import_subnets=bool(config.get("infoblox_import_objects_subnets", False)), import_vlan_views=bool(config.get("infoblox_import_objects_vlan_views", False)), diff --git a/nautobot_ssot/integrations/infoblox/tables.py b/nautobot_ssot/integrations/infoblox/tables.py index d3161752e..58542c984 100644 --- a/nautobot_ssot/integrations/infoblox/tables.py +++ b/nautobot_ssot/integrations/infoblox/tables.py @@ -13,6 +13,7 @@ class SSOTInfobloxConfigTable(BaseTable): name = tables.LinkColumn() infoblox_url = tables.Column(accessor="infoblox_instance__remote_url") enable_sync_to_infoblox
= BooleanColumn(orderable=False) + enable_sync_to_nautobot = BooleanColumn(orderable=False) import_subnets = BooleanColumn(orderable=False) import_ip_addresses = BooleanColumn(orderable=False) import_vlan_views = BooleanColumn(orderable=False) diff --git a/nautobot_ssot/migrations/0009_ssotconfig_ssotinfobloxconfig.py b/nautobot_ssot/migrations/0009_ssotconfig_ssotinfobloxconfig.py index 6d4e7de70..ae14c1560 100644 --- a/nautobot_ssot/migrations/0009_ssotconfig_ssotinfobloxconfig.py +++ b/nautobot_ssot/migrations/0009_ssotconfig_ssotinfobloxconfig.py @@ -1,4 +1,4 @@ -# Generated by Django 3.2.23 on 2024-06-13 18:08 +# Generated by Django 3.2.23 on 2024-06-17 14:33 import django.core.serializers.json from django.db import migrations, models @@ -46,6 +46,7 @@ class Migration(migrations.Migration): ("description", models.CharField(blank=True, max_length=255)), ("infoblox_wapi_version", models.CharField(default="v2.12", max_length=255)), ("enable_sync_to_infoblox", models.BooleanField(default=False)), + ("enable_sync_to_nautobot", models.BooleanField(default=True)), ("import_ip_addresses", models.BooleanField(default=False)), ("import_subnets", models.BooleanField(default=False)), ("import_vlan_views", models.BooleanField(default=False)), diff --git a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html index a20a317b2..0075aac1f 100644 --- a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html +++ b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html @@ -52,6 +52,10 @@

    <tr> <td>Enable Sync from Nautobot to Infoblox</td> <td>{{ object.enable_sync_to_infoblox }}</td> </tr> + <tr> + <td>Enable Sync from Infoblox to Nautobot</td> + <td>{{ object.enable_sync_to_nautobot }}</td> + </tr> <tr> <td>Import IP Addresses</td> <td>{{ object.import_ip_addresses }}</td> </tr> diff --git a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_update.html b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_update.html index 5cfbb2079..f8cc8219f 100644 --- a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_update.html +++ b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_update.html @@ -11,6 +11,7 @@ {% render_field form.infoblox_wapi_version %} {% render_field form.job_enabled %} {% render_field form.enable_sync_to_infoblox %} + {% render_field form.enable_sync_to_nautobot %} {% render_field form.import_ip_addresses %} {% render_field form.import_subnets %} {% render_field form.import_vlan_views %} From da9bbe931bb85786956b9eb9c58291819a3741a7 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 17 Jun 2024 16:23:42 +0100 Subject: [PATCH 161/229] Update docs. Add changelog. Add change fragments. --- changes/442.added | 12 + changes/442.changed | 4 + changes/442.documentation | 1 + changes/442.removed | 2 + docs/admin/integrations/infoblox_setup.md | 263 +++++++++++++--------- docs/admin/release_notes/version_2.7.md | 46 ++++ mkdocs.yml | 1 + 7 files changed, 226 insertions(+), 103 deletions(-) create mode 100644 changes/442.added create mode 100644 changes/442.changed create mode 100644 changes/442.documentation create mode 100644 changes/442.removed create mode 100644 docs/admin/release_notes/version_2.7.md diff --git a/changes/442.added b/changes/442.added new file mode 100644 index 000000000..6d0a3aecc --- /dev/null +++ b/changes/442.added @@ -0,0 +1,12 @@ +Added a plugin configuration page collecting configurations for integrations. +Infoblox integration - added SSOTInfobloxConfig model used for providing Infoblox integration configuration. +Infoblox integration - added support for multiple configuration instances. +Infoblox integration - added support for Infoblox Network Views and Nautobot Namespaces. +Infoblox integration - added support for selecting a subset of Network and IP address objects loaded for synchronization. +Infoblox integration - added support for creating Infoblox IP Addresses as A and PTR records. +Infoblox integration - added support for creating Infoblox IP Addresses as Fixed Address records of type RESERVED and MAC_ADDRESS. +Infoblox integration - added support for excluding extensible attributes and custom fields when synchronizing objects. +Infoblox integration - added support for selectively enabling synchronization of IPv4 and IPv6 objects. +Infoblox integration - added support for specifying the Infoblox DNS View where DNS records are created. +Infoblox integration - added support for specifying record types subject to deletion in Infoblox and Nautobot. +Infoblox integration - added methods to the Infoblox client handling fixed addresses, DNS A, Host and PTR records, network views, DNS views, and authoritative zones. diff --git a/changes/442.changed b/changes/442.changed new file mode 100644 index 000000000..e4f1de75f --- /dev/null +++ b/changes/442.changed @@ -0,0 +1,4 @@ +Infoblox integration - configuration settings are now defined in instances of the SSOTInfobloxConfig model. +Infoblox integration - functionality provided by the `infoblox_import_subnets` setting has been replaced with the `infoblox_sync_filters` field in the SSOTInfobloxConfig instance.
+Infoblox integration - updated Infoblox client methods to support Network View. +Infoblox integration - standardized `JSONDecoderError` handling in the Infoblox client. diff --git a/changes/442.documentation b/changes/442.documentation new file mode 100644 index 000000000..bb1fd3a38 --- /dev/null +++ b/changes/442.documentation @@ -0,0 +1 @@ +Add missing attribution for Device42 integration to README. diff --git a/changes/442.removed b/changes/442.removed new file mode 100644 index 000000000..579e5a660 --- /dev/null +++ b/changes/442.removed @@ -0,0 +1,2 @@ +Infoblox integration - configuration settings defined in `nautobot_config.py` have been removed. +Infoblox integration - configuration settings defined in environmental variables have been removed. \ No newline at end of file diff --git a/docs/admin/integrations/infoblox_setup.md b/docs/admin/integrations/infoblox_setup.md index 082019e8f..e4ffe06f6 100644 --- a/docs/admin/integrations/infoblox_setup.md +++ b/docs/admin/integrations/infoblox_setup.md @@ -4,7 +4,7 @@ This guide will walk you through the steps to set up Infoblox integration with t ## Prerequisites -Before configuring the integration, please ensure, that `nautobot-ssot` app was [installed with the Infoblox integration extra dependencies](../install.md#install-guide). +Before configuring the integration, please ensure, that the `nautobot-ssot` app was [installed with the Infoblox integration extra dependencies](../install.md#install-guide). ```shell pip install nautobot-ssot[infoblox] @@ -12,58 +12,169 @@ pip install nautobot-ssot[infoblox] ## Configuration -Integration behavior can be controlled with the following settings: - -| Setting | Default | Description | -| ------------------------------------------ | ------- | ----------------------------------------------------------------------------- | -| infoblox_url | N/A | URL of the Infoblox instance to sync with. | -| infoblox_username | N/A | The username to authenticate against Infoblox with. | -| infoblox_password | N/A | The password to authenticate against Infblox with. | -| infoblox_verify_ssl | True | Toggle SSL verification when syncing data with Infoblox. | -| infoblox_wapi_version | v2.12 | The version of the Infoblox API. | -| infoblox_enable_sync_to_infoblox | False | Add job to sync data from Nautobot into Infoblox. | -| infoblox_enable_rfc1918_network_containers | False | Add job to sync network containers to Nautobot (top level aggregates). | -| infoblox_default_status | active | Default Status to be assigned to imported objects. | -| infoblox_import_objects_ip_addresses | False | Import IP addresses from Infoblox to Nautobot. | -| infoblox_import_objects_subnets | False | Import subnets from Infoblox to Nautobot. | -| infoblox_import_objects_subnets_ipv6 | False | Import IPv6 subnets from Infoblox to Nautobot. | -| infoblox_import_objects_vlan_views | False | Import VLAN views from Infoblox to Nautobot. | -| infoblox_import_objects_vlans | False | Import VLANs from Infoblox to Nautobot. | -| infoblox_import_subnets | N/A | List of Subnets in CIDR string notation to filter import to. | -| infoblox_network_view | N/A | Only load IPAddresses from a specific Infoblox Network View. | -| infoblox_request_timeout | 60 | How long HTTP requests to Infoblox should wait for a response before failing. 
| - -Below is an example snippet from `nautobot_config.py` that demonstrates how to enable and configure Infoblox integration: - -```python -PLUGINS_CONFIG = { - "nautobot_ssot": { - "enable_infoblox": True, - "infoblox_default_status": os.getenv("NAUTOBOT_SSOT_INFOBLOX_DEFAULT_STATUS", "active"), - "infoblox_enable_rfc1918_network_containers": is_truthy( - os.getenv("NAUTOBOT_SSOT_INFOBLOX_ENABLE_RFC1918_NETWORK_CONTAINERS") - ), - "infoblox_enable_sync_to_infoblox": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_ENABLE_SYNC_TO_INFOBLOX")), - "infoblox_import_objects_ip_addresses": is_truthy( - os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_IP_ADDRESSES") - ), - "infoblox_import_objects_subnets": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_SUBNETS")), - "infoblox_import_objects_subnets_ipv6": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_SUBNETS_IPV6")), - "infoblox_import_objects_vlan_views": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_VLAN_VIEWS")), - "infoblox_import_objects_vlans": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_VLANS")), - "infoblox_import_subnets": [x for x in os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_SUBNETS", "").split(",") if x], - "infoblox_password": os.getenv("NAUTOBOT_SSOT_INFOBLOX_PASSWORD"), - "infoblox_url": os.getenv("NAUTOBOT_SSOT_INFOBLOX_URL"), - "infoblox_username": os.getenv("NAUTOBOT_SSOT_INFOBLOX_USERNAME"), - "infoblox_verify_ssl": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_VERIFY_SSL", True)), - "infoblox_wapi_version": os.getenv("NAUTOBOT_SSOT_INFOBLOX_WAPI_VERSION", "v2.12"), - "infoblox_request_timeout": 120, +!!! note + Legacy configuration settings defined in the `nautobot_config.py` and environmental variables are deprecated. These settings are migrated on a best-effort basis on the first startup following migration to the Nautobot SSOT 2.7.0 or higher. + +Integration configuration is defined in the instance of the `SSOTInfobloxConfig` model. Multiple configuration instances are supported. Synchronization jobs take the `Config` parameter which specifies the configuration instance to use. + +To access integration configuration navigate to `Plugins -> Installed Plugins` and click on the cog icon in the `Single Source of Truth` entry. Then in the table `SSOT Integration Configs` click on the `Infoblox Configuration List` link. This will take you to the view where you can view/modify existing config instances or create new ones. + +Configuration instance contains the below settings: + +| Name | N/A | Unique name of the configuration instance. | +| Description | N/A | Description of the configuration instance. | +| Infoblox Instance Config | N/A | External Integration object describing remote Infoblox instance. | +| Infoblox WAPI Version | v2.12 | The version of the Infoblox API. | +| Enabled for Sync Job | False | Allows this config to be used in the sync jobs. | +| Sync to Infoblox | False | Allows this config to be used in the job syncing from Nautobot to Infoblox. | +| Sync to Nautobot | True | Allows this config to be used in the job syncing from Infoblox to Nautobot. | +| Import IP Addresses | False | Import IP addresses from the source to the target system. | +| Import Networks | False | Import IP networks from the source to the target system. | +| Import VLAN Views | False | Import VLAN Views from the source to the target system. | +| Import VLANs | False | Import VLANs from the source to the target system. 
| +| Import IPv4 | True | Import IPv4 objects from the source to the target system. | +| Import IPv6 | False | Import IPv6 objects from the source to the target system. | +| Fixed address type | Do not create record | Selects type of Fixed Address to create in Infoblox for imported IP Addresses. | +| DNS record type | Do not create record | Selects the type of DNS record to create in Infoblox for imported IP Addresses. | +| Default object status | Active | Default Status to be assigned to imported objects. | +| Infoblox - deletable models | [] | Infoblox model types whose instances are allowed to be deleted during sync. | +| Nautobot - deletable models | [] | Nautobot model types whose instances are allowed to be deleted during sync. | +| Infoblox Sync Filters | `[{"network_view": "default"}]` | Filters control what data is loaded from the source and target systems and considered for sync. | +| Infoblox Network View to DNS Mapping | `{}` | Map specifying Infoblox DNS View for each Network View where DNS records need to be created. +| Extensible Attributes/Custom Fields to Ignore | `{"custom_fields": [], "extensible_attributes": []}` | Specifies Nautobot custom fields and Infoblox extensible attributes that are excluded from the sync. | + +Each Infoblox configuration must be linked to an External Integration describing the Infoblox instance. The following External Integration fields must be defined for integration to work correctly: + +| Remote URL | URL of the remote Infoblox instance to sync with. | +| Verify SSL | Toggle SSL verification when syncing data with Infoblox. | +| Secrets Group | Secrets Group defining credentials used when connecting to the Infoblox instance. | +| Timeout | How long HTTP requests to Infoblox should wait for a response before failing. | + +The Secrets Group linked to the Infoblox External Integration must contain password and username secrets defined as per the below: + +| Access Type | Secret Type | +| REST | Password | +| REST | Username | + + +### Configuring Infoblox Sync Filters + +Infoblox Sync Filters is a mandatory setting used to control the scope of the IP objects that are loaded from Nautobot and Infoblox. Only these objects are in the scope of the synchronization process. The default value of this setting is: + +```json +[ + { + "network_view": "default" + } +] +``` + +This default value specifies that all IPv4 and IPv6 objects located in Infoblox "default" Network View or Nautobot "Global" Namespace, will loaded for comparison and considered for synchronization. + +Infoblox Sync Filters can contain multiple entries. Each entry is a dictionary with one mandatory key `network_view` and two optional keys `prefixes_ipv4` and `prefixes_ipv6`. + +- `network_view` specifies the name of the Infoblox Network View/Nautobot Namespace from which to load IP objects. There can be only one filter entry per network view name. +- `prefixes_ipv4` (optional) - a list of top-level IPv4 prefixes from which to load IPv4 networks and IP addresses. This applies to both Infoblox and Nautobot. If this key is not defined, all IPv4 addresses within the given namespace are allowed to be loaded. +- `prefixes_ipv6` (optional) - a list of top-level IPv6 prefixes from which to load IPv6 networks and IP addresses. This applies to both Infoblox and Nautobot. If this key is not defined, all IPv6 addresses within the given namespace are allowed to be loaded. 
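As a rough illustration of the matching semantics described above, a filter entry could be evaluated against a candidate prefix like this (a minimal sketch using only the Python standard library; the function name is illustrative, not the integration's actual implementation):

```python
import ipaddress


def prefix_allowed_by_filter(prefix: str, sync_filter: dict) -> bool:
    """Illustrative check: may `prefix` be loaded under one sync filter entry?"""
    candidate = ipaddress.ip_network(prefix)
    top_level = sync_filter.get(f"prefixes_ipv{candidate.version}")
    # No prefix list defined for this IP version: everything in the
    # network view is allowed to load.
    if not top_level:
        return True
    return any(
        candidate.subnet_of(ipaddress.ip_network(container))
        for container in top_level
    )


dev_filter = {"network_view": "dev", "prefixes_ipv4": ["192.168.0.0/16"]}
print(prefix_allowed_by_filter("192.168.10.0/24", dev_filter))  # True
print(prefix_allowed_by_filter("10.1.0.0/24", dev_filter))      # False
```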
+ +Below is an example showing three filters used for filtering loaded data: + +```json +[ + { + "network_view": "default" + }, + { + "network_view": "dev", + "prefixes_ipv4": [ + "192.168.0.0/16" + ] + }, + { + "network_view": "test", + "prefixes_ipv4": [ + "10.0.0.0/8" + ], + "prefixes_ipv6": [ + "2001:5b0:4100::/40" + ] } +] +``` + +The above filters will allow the loading of the following data from Infoblox and Nautobot: + +- All IPv4 and IPv6 prefixes and IP addresses in the Infoblox network view "default" and Nautobot namespace "Global". +- Only IPv4 prefixes and IP addresses, contained within the `192.168.0.0/16` container, located in Infoblox network view "dev" and Nautobot namespace "dev". All IPv6 prefixes and IP addresses in the Infoblox network view "dev" and Nautobot namespace "dev". +- Only IPv4 prefixes and IP addresses, contained within the `10.0.0.0/8` container, located in Infoblox network view "test" and Nautobot namespace "test". Only IPv6 prefixes and IP addresses contained withing the `2001:5b0:4100::/40` container that are located in the Infoblox network view "test" and Nautobot namespace "test". + + +### Configuring Infoblox DNS View Mapping + +Infoblox DNS View Mapping is an optional setting that tells Infoblox SSOT where to create DNS Host, A, and PTR records. Infoblox allows multiple DNS Views to be defined for one Network View. If no mappings are configured the application will create DNS records in the default DNS View associated with the Network View, usually named `default.{network_view_name}`, where `network_view_name` is the name of the parent Network View. + +To define mapping specify the name of the Network View as the key and the name of the DNS View as the value. For example: + + +```json +{ + "dev": "dev view", + "default": "corporate", } ``` -!!! note - All integration settings are defined in the block above as an example. Only some will be needed as described below. +The above configuration will create DNS records linked to Network View "dev" in the "dev view" DNS View and records linked to Network View "default" in the "corporate" DNS View. + +### Configuring Extensible Attributes/Custom Fields to Ignore + +Extensible Attributes/Custom Fields to Ignore setting allows specifying Infoblox Extensive Attributes and Nautobot Custom Fields that are excluded from the synchronization. This stops unwanted extra data that is used for other purposes from being propagated between the systems. + +The default value of this setting is: + +```json +{ + "extensible_attributes": [], + "custom_fields": [] +} +``` + +That is, by default, all of the extensible attributes and custom fields will be synchronized, except the custom fields used internally by the Infoblox integration. + +To exclude Infoblox extensible attributes from being synchronized to Nautobot add the attribute names to the list `extensible_attributes` list. + +To exclude Infoblox custom fields from being synchronized to Infoblox add the custom field names to the list `custom_fields` list. + +## Custom Fields, Tags, and Relationships Used by The Infoblox Integration + +The Infoblox Integration requires the following Nautobot custom fields, tags, and relationships to function correctly. These are created automatically when Nautobot is started and care should be taken to ensure these are not deleted. + +### Custom Fields + +`dhcp_ranges` - Records DHCP ranges associated with a network. This applies to the following models: `Prefix`. 
+`ssot_synced_to_infoblox` - Records the date the Nautobot object was last synchronized to Infoblox. This applies to the following models: `IPAddress`, `Prefix`, `VLAN`, and `VLANGroup`. +`mac_address` - Records MAC address associated with an IP Address. This is required when creating an Infoblox Fixed Address of type MAC from Nautobot IP Address objects. This applies to the following model: `IPAddress`. +`fixed_address_comment` - Records comment for the corresponding Fixed Address record in Infoblox. This applies to the following model: `IPAddress`. +`dns_a_record_comment_custom_field` - Records comment for the corresponding DNS A record in Infoblox. This applies to the following model: `IPAddress`. +`dns_host_record_comment_custom_field` - Records comment for the corresponding DNS Host record in Infoblox. This applies to the following model: `IPAddress`. +`dns_ptr_record_comment_custom_field` - Records comment for the corresponding DNS PTR record in Infoblox. This applies to the following model: `IPAddress`. + + +### Tags + +`SSoT Synced from Infoblox` - Used to tag Nautobot objects that were synchronized from Infoblox. This applies to the following models: `IPAddress`, `Namespace`, `Prefix`, and `VLAN`. +`SSoT Synced to Infoblox` - Used to tag Nautobot objects that were synchronized to Infoblox. +This applies to the following models: `IPAddress`, `Prefix`, and `VLAN`. + + +### Relationships + +`prefix_to_vlan` - Used to link Nautobot Prefix to a Nautobot VLAN. This corresponds to an Infoblox Network to VLAN relationship. + +### Usage Notes + +- To create an Infoblox Fixed Address record from a Nautobot IP Address object the Nautobot side must have IP Address type set to `DHCP`. +- To create an Infoblox Fixed Address of type MAC the Nautobot IP Address must have a value defined in the `mac_address` custom field. 
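For example, a Nautobot IP address eligible to be exported as a Fixed Address of type MAC might be staged as follows (a rough Django ORM sketch, assuming an `Active` status and a parent `192.168.0.0/24` prefix already exist in the target namespace; the address and MAC values are hypothetical):

```python
from nautobot.extras.models import Status
from nautobot.ipam.models import IPAddress

# Fixed Address export requires IP Address type "dhcp"; a MAC-type record
# additionally needs the `mac_address` custom field populated.
ip = IPAddress(
    address="192.168.0.10/24",
    status=Status.objects.get(name="Active"),
    type="dhcp",
)
ip._custom_field_data["mac_address"] = "00:11:22:33:44:55"
ip._custom_field_data["fixed_address_comment"] = "Printer reservation"
ip.validated_save()
```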
+ ## Upgrading from `nautobot-plugin-ssot-infoblox` App @@ -78,57 +189,3 @@ PLUGINS_CONFIG = { ```shell pip install --upgrade nautobot-ssot[infoblox] ``` -- Fix `nautobot_config.py` by removing `nautobot_ssot_infoblox` from `PLUGINS` and merging app configuration into `nautobot_ssot`: - ```python - PLUGINS = [ - "nautobot_ssot", - # "infoblox" # REMOVE THIS LINE - ] - - PLUGINS_CONFIG = { - # "nautobot_ssot_infoblox": { REMOVE THIS APP CONFIGURATION - # MOVE CONFIGURATION TO `nautobot_ssot` SECTION AND UPDATE KEYS - # "NAUTOBOT_INFOBLOX_URL": os.getenv("NAUTOBOT_INFOBLOX_URL", ""), - # "NAUTOBOT_INFOBLOX_USERNAME": os.getenv("NAUTOBOT_INFOBLOX_USERNAME", ""), - # "NAUTOBOT_INFOBLOX_PASSWORD": os.getenv("NAUTOBOT_INFOBLOX_PASSWORD", ""), - # "NAUTOBOT_INFOBLOX_VERIFY_SSL": os.getenv("NAUTOBOT_INFOBLOX_VERIFY_SSL", "true"), - # "NAUTOBOT_INFOBLOX_WAPI_VERSION": os.getenv("NAUTOBOT_INFOBLOX_WAPI_VERSION", "v2.12"), - # "enable_sync_to_infoblox": False, - # "enable_rfc1918_network_containers": False, - # "default_status": "active", - # "infoblox_import_objects": { - # "vlan_views": os.getenv("NAUTOBOT_INFOBLOX_IMPORT_VLAN_VIEWS", True), - # "vlans": os.getenv("NAUTOBOT_INFOBLOX_IMPORT_VLANS", True), - # "subnets": os.getenv("NAUTOBOT_INFOBLOX_INFOBLOX_IMPORT_SUBNETS", True), - # "ip_addresses": os.getenv("NAUTOBOT_INFOBLOX_IMPORT_IP_ADDRESSES", True), - # }, - # "infoblox_import_subnets": ["10.46.128.0/18", "192.168.1.0/24"], - # } - "nautobot_ssot": { - # Enable Infoblox integration - "enable_infoblox": True, - # Following lines are moved from `nautobot_ssot_infoblox` and prefixed with `infoblox_` - "infoblox_default_status": os.getenv("NAUTOBOT_SSOT_INFOBLOX_DEFAULT_STATUS", "active"), - "infoblox_enable_rfc1918_network_containers": is_truthy( - os.getenv("NAUTOBOT_SSOT_INFOBLOX_ENABLE_RFC1918_NETWORK_CONTAINERS") - ), - "infoblox_enable_sync_to_infoblox": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_ENABLE_SYNC_TO_INFOBLOX")), - "infoblox_import_objects_ip_addresses": is_truthy( - os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_IP_ADDRESSES") - ), - "infoblox_import_objects_subnets": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_SUBNETS")), - "infoblox_import_objects_subnets_ipv6": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_SUBNETS_IPV6")), - "infoblox_import_objects_vlan_views": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_VLAN_VIEWS")), - "infoblox_import_objects_vlans": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_VLANS")), - "infoblox_import_subnets": [x for x in os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_SUBNETS", "").split(",") if x], - "infoblox_password": os.getenv("NAUTOBOT_SSOT_INFOBLOX_PASSWORD"), - "infoblox_url": os.getenv("NAUTOBOT_SSOT_INFOBLOX_URL"), - "infoblox_username": os.getenv("NAUTOBOT_SSOT_INFOBLOX_USERNAME"), - "infoblox_verify_ssl": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_VERIFY_SSL", True)), - "infoblox_wapi_version": os.getenv("NAUTOBOT_SSOT_INFOBLOX_WAPI_VERSION", "v2.12"), - } - } - ``` - -!!! note - Configuration keys are prefixed with `infoblox_`. 
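Because the new configuration lives in a regular Django model, a config instance can also be bootstrapped from the ORM rather than the UI. A sketch of what that could look like (the import path and the `infoblox_instance` field name are assumptions inferred from the model and templates above; the External Integration is expected to exist already):

```python
from nautobot.extras.models import ExternalIntegration
from nautobot_ssot.integrations.infoblox.models import SSOTInfobloxConfig  # import path assumed

config, _ = SSOTInfobloxConfig.objects.update_or_create(
    name="InfobloxConfigDev",
    defaults={
        "description": "Sync scoped to the dev network view",
        # Field name for the External Integration FK is an assumption.
        "infoblox_instance": ExternalIntegration.objects.get(name="DefaultInfobloxInstance"),
        "enable_sync_to_nautobot": True,
        "import_subnets": True,
        "import_ip_addresses": True,
        "infoblox_sync_filters": [{"network_view": "dev"}],
    },
)
```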
diff --git a/docs/admin/release_notes/version_2.7.md b/docs/admin/release_notes/version_2.7.md new file mode 100644 index 000000000..45bc14599 --- /dev/null +++ b/docs/admin/release_notes/version_2.7.md @@ -0,0 +1,46 @@ + +# v2.7 Release Notes + +## [v2.7.0 ()](https://github.com/nautobot/nautobot-app-ssot/releases/tag/v2.7.0) + +### Added + +- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Added SSOTConfig model and view for exposing configurations of individual integrations. +- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - added Namespace, DnsARecord, DnsHostRecord and DnsPTRRecord diffsync models. +- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - modified Network and IPAddress models to support namespaces and creation of additional IP Address record types. +- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - synchronization jobs have a new mandatory field called `Config`. This field specifies which Infoblox Config to use with the job. +- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - Full support for Infoblox network views and Nautobot namespace has been added. Multiple network views/namespaces and their IP objects can now be safely loaded. This allows for importing overlapping prefixes from Infoblox that are assigned to corresponding Namespaces in Nautobot. +- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - added support for excluding extensible attributes and custom fields from sync. +- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - added configuration setting that specifies the mapping between network view and DNS view. This is required to correctly create DNS records in Infoblox. +- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - added support for specifying a subset of IP Prefixes and IP Addresses loaded for synchronization. +- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - added support for creating Infoblox IP Addresses as either Host or A records. An optional PTR record can be created alongside A record. +- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - added support for updating Infoblox Fixed Address, and DNS Host, A, and PTR records. +- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - added support for creating/updating IP Addresses in Infoblox as Fixed Address of type RESERVED or MAC_ADDRESS. +- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - added support for specifying record types that can be deleted in Infoblox and Nautobot. +- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - added multiple new methods in the Infoblox client for dealing with fixed addresses, DNS A, Host and PTR records, network views, DNS views and authoritative zones. +- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - added the following custom fields to support new functionality: `mac_address`, `fixed_address_comment`, `dns_a_record_comment`, `dns_host_record_comment`, `dns_ptr_record_comment`. +- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - added check for the minimum version of Nautobot. 
This release requires Nautobot 2.1 or greater. + +### Changed + +- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - configuration is no longer defined in `nautobot_config.py`. Configuration is now defined in the SSOT Infoblox Config object. This can be set up in UI or using Django ORM. + - The existing configuration is taken from `nautobot_config.py` will be automatically migrated to the SSOT Infoblox Config object named `InfobloxConfigDefault`. + - Configuration of the Infoblox instance is now recorded in the ExternalIntegration object. The existing configuration will be automatically migrated to the instance named `DefaultInfobloxInstance`. + - Credentials are now defined in the Secrets Group. The migrated configuration expects the username to come from the `NAUTOBOT_SSOT_INFOBLOX_USERNAME` env var and the password to come from the `NAUTOBOT_SSOT_INFOBLOX_PASSWORD` env var. To use a different method of providing credentials modify secrets attached to the `InfobloxSSOTDefaultSecretGroup` SecretsGroup. + +- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Default behavior when loading prefixes has changed. Previously all prefixes from all Infoblox network views were loaded by default, with duplicate prefixes removed. This process was non-deterministic and resulted in all Infoblox prefixes assigned to the "Global" namespace in Nautobot. Infoblox integration now enforces the use of the `infoblox_sync_filters` setting, defined in the Infoblox Config, with the default value set to `[{"network_view": "default"}]`. This default setting results in loading all of the prefixes from the Infoblox network view "default" only and assigning them to the "Global" namespace in Infoblox. See Infoblox sync filter documentation for details on how to define filters. +- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - standardized and improved error handling in the Infoblox client. + + +### Removed + +- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - environmental variables used to configure the integration have been deprecated. + +### Fixed + +- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - The Infoblox client logging level now honors the `debug` job option. + +### Housekeeping + +- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Increased test coverage. + diff --git a/mkdocs.yml b/mkdocs.yml index ded5b3db3..3081d12c6 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -132,6 +132,7 @@ nav: - Compatibility Matrix: "admin/compatibility_matrix.md" - Release Notes: - "admin/release_notes/index.md" + - v2.7: "admin/release_notes/version_2.7.md" - v2.6: "admin/release_notes/version_2.6.md" - v2.5: "admin/release_notes/version_2.5.md" - v2.4: "admin/release_notes/version_2.4.md" From 91c9d891e5d11865e7862001843100ac5ea8a13b Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Tue, 18 Jun 2024 10:23:58 +0100 Subject: [PATCH 162/229] Fix test indentation. 
--- .../tests/infoblox/test_nautobot_models.py | 88 +++++++++---------- 1 file changed, 44 insertions(+), 44 deletions(-) diff --git a/nautobot_ssot/tests/infoblox/test_nautobot_models.py b/nautobot_ssot/tests/infoblox/test_nautobot_models.py index c7177eff4..6a6f67dc7 100644 --- a/nautobot_ssot/tests/infoblox/test_nautobot_models.py +++ b/nautobot_ssot/tests/infoblox/test_nautobot_models.py @@ -202,50 +202,50 @@ def setUp(self): ) self.infoblox_adapter.add(inf_ds_namespace) - mac_address_custom_field, _ = CustomField.objects.get_or_create( - type=CustomFieldTypeChoices.TYPE_TEXT, - key="mac_address", - defaults={ - "label": "MAC Address", - }, - ) - mac_address_custom_field.content_types.add(ContentType.objects.get_for_model(IPAddress)) - - fixed_address_comment_custom_field, _ = CustomField.objects.get_or_create( - type=CustomFieldTypeChoices.TYPE_TEXT, - key="fixed_address_comment", - defaults={ - "label": "Fixed Address Comment", - }, - ) - fixed_address_comment_custom_field.content_types.add(ContentType.objects.get_for_model(IPAddress)) - - dns_a_record_comment_custom_field, _ = CustomField.objects.get_or_create( - type=CustomFieldTypeChoices.TYPE_TEXT, - key="dns_a_record_comment", - defaults={ - "label": "DNS A Record Comment", - }, - ) - dns_a_record_comment_custom_field.content_types.add(ContentType.objects.get_for_model(IPAddress)) - - dns_host_record_comment_custom_field, _ = CustomField.objects.get_or_create( - type=CustomFieldTypeChoices.TYPE_TEXT, - key="dns_host_record_comment", - defaults={ - "label": "DNS Host Record Comment", - }, - ) - dns_host_record_comment_custom_field.content_types.add(ContentType.objects.get_for_model(IPAddress)) - - dns_ptr_record_comment_custom_field, _ = CustomField.objects.get_or_create( - type=CustomFieldTypeChoices.TYPE_TEXT, - key="dns_ptr_record_comment", - defaults={ - "label": "DNS PTR Record Comment", - }, - ) - dns_ptr_record_comment_custom_field.content_types.add(ContentType.objects.get_for_model(IPAddress)) + mac_address_custom_field, _ = CustomField.objects.get_or_create( + type=CustomFieldTypeChoices.TYPE_TEXT, + key="mac_address", + defaults={ + "label": "MAC Address", + }, + ) + mac_address_custom_field.content_types.add(ContentType.objects.get_for_model(IPAddress)) + + fixed_address_comment_custom_field, _ = CustomField.objects.get_or_create( + type=CustomFieldTypeChoices.TYPE_TEXT, + key="fixed_address_comment", + defaults={ + "label": "Fixed Address Comment", + }, + ) + fixed_address_comment_custom_field.content_types.add(ContentType.objects.get_for_model(IPAddress)) + + dns_a_record_comment_custom_field, _ = CustomField.objects.get_or_create( + type=CustomFieldTypeChoices.TYPE_TEXT, + key="dns_a_record_comment", + defaults={ + "label": "DNS A Record Comment", + }, + ) + dns_a_record_comment_custom_field.content_types.add(ContentType.objects.get_for_model(IPAddress)) + + dns_host_record_comment_custom_field, _ = CustomField.objects.get_or_create( + type=CustomFieldTypeChoices.TYPE_TEXT, + key="dns_host_record_comment", + defaults={ + "label": "DNS Host Record Comment", + }, + ) + dns_host_record_comment_custom_field.content_types.add(ContentType.objects.get_for_model(IPAddress)) + + dns_ptr_record_comment_custom_field, _ = CustomField.objects.get_or_create( + type=CustomFieldTypeChoices.TYPE_TEXT, + key="dns_ptr_record_comment", + defaults={ + "label": "DNS PTR Record Comment", + }, + ) + dns_ptr_record_comment_custom_field.content_types.add(ContentType.objects.get_for_model(IPAddress)) def 
test_ip_address_create_from_fixed_address_reserved(self): """Validate ip address gets created from Infoblox fixed address reservation.""" From 7404a840b8e197c17e1c6658442a9d6150b4e13d Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Tue, 18 Jun 2024 11:01:41 +0100 Subject: [PATCH 163/229] Create relationships for tests. --- nautobot_ssot/tests/infoblox/test_nautobot_models.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nautobot_ssot/tests/infoblox/test_nautobot_models.py b/nautobot_ssot/tests/infoblox/test_nautobot_models.py index 6a6f67dc7..be84aa7a1 100644 --- a/nautobot_ssot/tests/infoblox/test_nautobot_models.py +++ b/nautobot_ssot/tests/infoblox/test_nautobot_models.py @@ -15,8 +15,7 @@ ) from nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox import InfobloxAdapter from nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot import NautobotAdapter - -from .fixtures_infoblox import create_default_infoblox_config +from nautobot_ssot.tests.infoblox.fixtures_infoblox import create_default_infoblox_config, create_prefix_relationship def _get_ip_address_dict(attrs): @@ -108,6 +107,7 @@ class TestModelNautobotNetwork(TestCase): def setUp(self): "Test class set up." + create_prefix_relationship() self.config = create_default_infoblox_config() self.config.infoblox_sync_filters = [{"network_view": "default"}, {"network_view": "dev"}] self.namespace_dev, _ = Namespace.objects.get_or_create(name="dev") From 34456652accc074852f4c2d5b461f5adfdf2d5e3 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Tue, 18 Jun 2024 13:17:10 +0100 Subject: [PATCH 164/229] Create relationships for IPAddress tests. --- nautobot_ssot/tests/infoblox/test_nautobot_models.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nautobot_ssot/tests/infoblox/test_nautobot_models.py b/nautobot_ssot/tests/infoblox/test_nautobot_models.py index be84aa7a1..79970ba6b 100644 --- a/nautobot_ssot/tests/infoblox/test_nautobot_models.py +++ b/nautobot_ssot/tests/infoblox/test_nautobot_models.py @@ -185,6 +185,7 @@ class TestModelNautobotIPAddress(TestCase): def setUp(self): "Test class set up." + create_prefix_relationship() self.config = create_default_infoblox_config() self.config.infoblox_sync_filters = [{"network_view": "default"}, {"network_view": "dev"}] self.namespace_dev, _ = Namespace.objects.get_or_create(name="dev") From 1b1155dc06ff9d73b69928a83712fee7ae811b1f Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Thu, 20 Jun 2024 12:54:11 +0100 Subject: [PATCH 165/229] Add missing import_subnets field. --- .../nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html index 0075aac1f..8de7cb409 100644 --- a/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html +++ b/nautobot_ssot/templates/nautobot_ssot_infoblox/ssotinfobloxconfig_retrieve.html @@ -56,6 +56,10 @@

    Enable Sync from Infoblox to Nautobot {{ object.enable_sync_to_nautobot }} + + Import Networks + {{ object.import_subnets }} + Import IP Addresses {{ object.import_ip_addresses }} From 5d800aa00a43d3d571808962e98505988d337c20 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 24 Jun 2024 13:54:28 +0100 Subject: [PATCH 166/229] Update docs as per the PR review. --- docs/admin/integrations/infoblox_setup.md | 58 ++++++++++++----------- docs/admin/release_notes/version_2.7.md | 46 ------------------ 2 files changed, 30 insertions(+), 74 deletions(-) delete mode 100644 docs/admin/release_notes/version_2.7.md diff --git a/docs/admin/integrations/infoblox_setup.md b/docs/admin/integrations/infoblox_setup.md index e4ffe06f6..4d76bb98c 100644 --- a/docs/admin/integrations/infoblox_setup.md +++ b/docs/admin/integrations/infoblox_setup.md @@ -17,44 +17,46 @@ pip install nautobot-ssot[infoblox] Integration configuration is defined in the instance of the `SSOTInfobloxConfig` model. Multiple configuration instances are supported. Synchronization jobs take the `Config` parameter which specifies the configuration instance to use. -To access integration configuration navigate to `Plugins -> Installed Plugins` and click on the cog icon in the `Single Source of Truth` entry. Then in the table `SSOT Integration Configs` click on the `Infoblox Configuration List` link. This will take you to the view where you can view/modify existing config instances or create new ones. +To access integration configuration navigate to `Apps -> Installed Apps` and click on the cog icon in the `Single Source of Truth` entry. Then in the table `SSOT Integration Configs` click on the `Infoblox Configuration List` link. This will take you to the view where you can view/modify existing config instances or create new ones. Configuration instance contains the below settings: -| Name | N/A | Unique name of the configuration instance. | -| Description | N/A | Description of the configuration instance. | -| Infoblox Instance Config | N/A | External Integration object describing remote Infoblox instance. | -| Infoblox WAPI Version | v2.12 | The version of the Infoblox API. | -| Enabled for Sync Job | False | Allows this config to be used in the sync jobs. | -| Sync to Infoblox | False | Allows this config to be used in the job syncing from Nautobot to Infoblox. | -| Sync to Nautobot | True | Allows this config to be used in the job syncing from Infoblox to Nautobot. | -| Import IP Addresses | False | Import IP addresses from the source to the target system. | -| Import Networks | False | Import IP networks from the source to the target system. | -| Import VLAN Views | False | Import VLAN Views from the source to the target system. | -| Import VLANs | False | Import VLANs from the source to the target system. | -| Import IPv4 | True | Import IPv4 objects from the source to the target system. | -| Import IPv6 | False | Import IPv6 objects from the source to the target system. | -| Fixed address type | Do not create record | Selects type of Fixed Address to create in Infoblox for imported IP Addresses. | -| DNS record type | Do not create record | Selects the type of DNS record to create in Infoblox for imported IP Addresses. | -| Default object status | Active | Default Status to be assigned to imported objects. | -| Infoblox - deletable models | [] | Infoblox model types whose instances are allowed to be deleted during sync. 
| -| Nautobot - deletable models | [] | Nautobot model types whose instances are allowed to be deleted during sync. | -| Infoblox Sync Filters | `[{"network_view": "default"}]` | Filters control what data is loaded from the source and target systems and considered for sync. | -| Infoblox Network View to DNS Mapping | `{}` | Map specifying Infoblox DNS View for each Network View where DNS records need to be created. +| Setting | Default | Description | +| Name | N/A | Unique name of the configuration instance. | +| Description | N/A | Description of the configuration instance. | +| Infoblox Instance Config | N/A | External Integration object describing remote Infoblox instance. | +| Infoblox WAPI Version | v2.12 | The version of the Infoblox API. | +| Enabled for Sync Job | False | Allows this config to be used in the sync jobs. | +| Sync to Infoblox | False | Allows this config to be used in the job syncing from Nautobot to Infoblox. | +| Sync to Nautobot | True | Allows this config to be used in the job syncing from Infoblox to Nautobot. | +| Import IP Addresses | False | Import IP addresses from the source to the target system. | +| Import Networks | False | Import IP networks from the source to the target system. | +| Import VLAN Views | False | Import VLAN Views from the source to the target system. | +| Import VLANs | False | Import VLANs from the source to the target system. | +| Import IPv4 | True | Import IPv4 objects from the source to the target system. | +| Import IPv6 | False | Import IPv6 objects from the source to the target system. | +| Fixed address type | Do not create record | Selects type of Fixed Address to create in Infoblox for imported IP Addresses. | +| DNS record type | Do not create record | Selects the type of DNS record to create in Infoblox for imported IP Addresses. | +| Default object status | Active | Default Status to be assigned to imported objects. | +| Infoblox - deletable models | [] | Infoblox model types whose instances are allowed to be deleted during sync. | +| Nautobot - deletable models | [] | Nautobot model types whose instances are allowed to be deleted during sync. | +| Infoblox Sync Filters | `[{"network_view": "default"}]` | Filters control what data is loaded from the source and target systems and considered for sync. | +| Infoblox Network View to DNS Mapping | `{}`| Map specifying Infoblox DNS View for each Network View where DNS records need to be created. | Extensible Attributes/Custom Fields to Ignore | `{"custom_fields": [], "extensible_attributes": []}` | Specifies Nautobot custom fields and Infoblox extensible attributes that are excluded from the sync. | Each Infoblox configuration must be linked to an External Integration describing the Infoblox instance. The following External Integration fields must be defined for integration to work correctly: -| Remote URL | URL of the remote Infoblox instance to sync with. | -| Verify SSL | Toggle SSL verification when syncing data with Infoblox. | +| Setting | Description | +| Remote URL | URL of the remote Infoblox instance to sync with. | +| Verify SSL | Toggle SSL verification when syncing data with Infoblox. | | Secrets Group | Secrets Group defining credentials used when connecting to the Infoblox instance. | -| Timeout | How long HTTP requests to Infoblox should wait for a response before failing. | +| Timeout | How long HTTP requests to Infoblox should wait for a response before failing. 
| The Secrets Group linked to the Infoblox External Integration must contain password and username secrets defined as per the below: -| Access Type | Secret Type | -| REST | Password | -| REST | Username | +| Access Type | Secret Type | +| REST | Password | +| REST | Username | ### Configuring Infoblox Sync Filters @@ -127,7 +129,7 @@ The above configuration will create DNS records linked to Network View "dev" in ### Configuring Extensible Attributes/Custom Fields to Ignore -Extensible Attributes/Custom Fields to Ignore setting allows specifying Infoblox Extensive Attributes and Nautobot Custom Fields that are excluded from the synchronization. This stops unwanted extra data that is used for other purposes from being propagated between the systems. +Extensible Attributes/Custom Fields to Ignore setting allows specifying Infoblox Extensible Attributes and Nautobot Custom Fields that are excluded from the synchronization. This stops unwanted extra data that is used for other purposes from being propagated between the systems. The default value of this setting is: diff --git a/docs/admin/release_notes/version_2.7.md b/docs/admin/release_notes/version_2.7.md deleted file mode 100644 index 45bc14599..000000000 --- a/docs/admin/release_notes/version_2.7.md +++ /dev/null @@ -1,46 +0,0 @@ - -# v2.7 Release Notes - -## [v2.7.0 ()](https://github.com/nautobot/nautobot-app-ssot/releases/tag/v2.7.0) - -### Added - -- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Added SSOTConfig model and view for exposing configurations of individual integrations. -- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - added Namespace, DnsARecord, DnsHostRecord and DnsPTRRecord diffsync models. -- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - modified Network and IPAddress models to support namespaces and creation of additional IP Address record types. -- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - synchronization jobs have a new mandatory field called `Config`. This field specifies which Infoblox Config to use with the job. -- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - Full support for Infoblox network views and Nautobot namespace has been added. Multiple network views/namespaces and their IP objects can now be safely loaded. This allows for importing overlapping prefixes from Infoblox that are assigned to corresponding Namespaces in Nautobot. -- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - added support for excluding extensible attributes and custom fields from sync. -- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - added configuration setting that specifies the mapping between network view and DNS view. This is required to correctly create DNS records in Infoblox. -- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - added support for specifying a subset of IP Prefixes and IP Addresses loaded for synchronization. -- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - added support for creating Infoblox IP Addresses as either Host or A records. An optional PTR record can be created alongside A record. -- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - added support for updating Infoblox Fixed Address, and DNS Host, A, and PTR records. 
-- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - added support for creating/updating IP Addresses in Infoblox as Fixed Address of type RESERVED or MAC_ADDRESS. -- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - added support for specifying record types that can be deleted in Infoblox and Nautobot. -- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - added multiple new methods in the Infoblox client for dealing with fixed addresses, DNS A, Host and PTR records, network views, DNS views and authoritative zones. -- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - added the following custom fields to support new functionality: `mac_address`, `fixed_address_comment`, `dns_a_record_comment`, `dns_host_record_comment`, `dns_ptr_record_comment`. -- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - added check for the minimum version of Nautobot. This release requires Nautobot 2.1 or greater. - -### Changed - -- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - configuration is no longer defined in `nautobot_config.py`. Configuration is now defined in the SSOT Infoblox Config object. This can be set up in UI or using Django ORM. - - The existing configuration is taken from `nautobot_config.py` will be automatically migrated to the SSOT Infoblox Config object named `InfobloxConfigDefault`. - - Configuration of the Infoblox instance is now recorded in the ExternalIntegration object. The existing configuration will be automatically migrated to the instance named `DefaultInfobloxInstance`. - - Credentials are now defined in the Secrets Group. The migrated configuration expects the username to come from the `NAUTOBOT_SSOT_INFOBLOX_USERNAME` env var and the password to come from the `NAUTOBOT_SSOT_INFOBLOX_PASSWORD` env var. To use a different method of providing credentials modify secrets attached to the `InfobloxSSOTDefaultSecretGroup` SecretsGroup. - -- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Default behavior when loading prefixes has changed. Previously all prefixes from all Infoblox network views were loaded by default, with duplicate prefixes removed. This process was non-deterministic and resulted in all Infoblox prefixes assigned to the "Global" namespace in Nautobot. Infoblox integration now enforces the use of the `infoblox_sync_filters` setting, defined in the Infoblox Config, with the default value set to `[{"network_view": "default"}]`. This default setting results in loading all of the prefixes from the Infoblox network view "default" only and assigning them to the "Global" namespace in Infoblox. See Infoblox sync filter documentation for details on how to define filters. -- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - standardized and improved error handling in the Infoblox client. - - -### Removed - -- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Infoblox integration - environmental variables used to configure the integration have been deprecated. - -### Fixed - -- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - The Infoblox client logging level now honors the `debug` job option. - -### Housekeeping - -- [#442](https://github.com/nautobot/nautobot-app-ssot/pull/442) - Increased test coverage. 
- From 558d1fe1ea51c70cef60db31bd65abb3d726f9d8 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 24 Jun 2024 14:01:14 +0100 Subject: [PATCH 167/229] Remove reference from mkdocs.yml. --- mkdocs.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/mkdocs.yml b/mkdocs.yml index 3081d12c6..ded5b3db3 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -132,7 +132,6 @@ nav: - Compatibility Matrix: "admin/compatibility_matrix.md" - Release Notes: - "admin/release_notes/index.md" - - v2.7: "admin/release_notes/version_2.7.md" - v2.6: "admin/release_notes/version_2.6.md" - v2.5: "admin/release_notes/version_2.5.md" - v2.4: "admin/release_notes/version_2.4.md" From 63f90c7267eaed5157370726cfb12aaafc6ba1f1 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Mon, 24 Jun 2024 15:51:35 +0100 Subject: [PATCH 168/229] Only retrieve FORWARD zones. --- nautobot_ssot/integrations/infoblox/utils/client.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nautobot_ssot/integrations/infoblox/utils/client.py b/nautobot_ssot/integrations/infoblox/utils/client.py index 0a7459d76..95885b34a 100644 --- a/nautobot_ssot/integrations/infoblox/utils/client.py +++ b/nautobot_ssot/integrations/infoblox/utils/client.py @@ -1310,6 +1310,7 @@ def get_authoritative_zones_for_dns_view(self, view: str): url_path = "zone_auth" params = { "view": view, + "zone_format": "FORWARD", "_return_fields": "fqdn,view", "_return_as_object": 1, } From f036b9548b7b84b112671509cd6881d8cb823a15 Mon Sep 17 00:00:00 2001 From: Ken Celenza Date: Tue, 25 Jun 2024 13:38:26 -0400 Subject: [PATCH 169/229] Update servicenow_setup.md --- docs/admin/integrations/servicenow_setup.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/admin/integrations/servicenow_setup.md b/docs/admin/integrations/servicenow_setup.md index 6c1bbc891..a685078bc 100644 --- a/docs/admin/integrations/servicenow_setup.md +++ b/docs/admin/integrations/servicenow_setup.md @@ -80,3 +80,11 @@ PLUGINS_CONFIG = { !!! note Configuration keys are prefixed with `servicenow_`. + +## Why did my job fail with an `IncompleteJSONError`? + +``` +An exception occurred: `IncompleteJSONError: lexical error: invalid char in json text. Date: Tue, 25 Jun 2024 15:35:05 -0400 Subject: [PATCH 170/229] Update servicenow.md --- docs/user/integrations/servicenow.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/user/integrations/servicenow.md b/docs/user/integrations/servicenow.md index 5cf8dbcf1..72d5bfb7b 100644 --- a/docs/user/integrations/servicenow.md +++ b/docs/user/integrations/servicenow.md @@ -4,7 +4,7 @@ This integration provides the ability to synchronize basic data from Nautobot in - Nautobot Manufacturer table to ServiceNow Company table - Nautobot DeviceType table to ServiceNow Hardware Product Model table -- Nautobot Region and Site tables to ServiceNow Location table +- Nautobot Locations tables to ServiceNow Location table - Nautobot Device table to ServiceNow IP Switch table - Nautobot Interface table to ServiceNow Interface table From cff3b4112760754b78f771f608d4627e47b20cd6 Mon Sep 17 00:00:00 2001 From: itdependsnetworks Date: Tue, 25 Jun 2024 21:47:56 -0400 Subject: [PATCH 171/229] Add doc fragment --- changes/472.documentation | 1 + 1 file changed, 1 insertion(+) create mode 100644 changes/472.documentation diff --git a/changes/472.documentation b/changes/472.documentation new file mode 100644 index 000000000..3e14c04d7 --- /dev/null +++ b/changes/472.documentation @@ -0,0 +1 @@ +Update ServiceNow documentation for Locations and FAQ error. 
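To make the effect of the `zone_format` filter added to the Infoblox client above concrete, the equivalent standalone WAPI request would look roughly like this (host, credentials, and view are placeholders; a sketch, not the client's actual code):

```python
import requests

response = requests.get(
    "https://infoblox.example.com/wapi/v2.12/zone_auth",
    params={
        "view": "default",
        "zone_format": "FORWARD",  # exclude IPV4/IPV6 reverse-mapping zones
        "_return_fields": "fqdn,view",
        "_return_as_object": 1,
    },
    auth=("admin", "changeme"),
    verify=True,
    timeout=60,
)
response.raise_for_status()
zones = response.json()["result"]  # list of {"fqdn": ..., "view": ...} objects
```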
From f07d6d6daea0e306c6684d7b07eba697a2f6a8b2 Mon Sep 17 00:00:00 2001 From: itdependsnetworks Date: Tue, 25 Jun 2024 21:50:50 -0400 Subject: [PATCH 172/229] move to the faq --- docs/admin/integrations/servicenow_setup.md | 7 ------- docs/user/faq.md | 8 ++++++++ 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/docs/admin/integrations/servicenow_setup.md b/docs/admin/integrations/servicenow_setup.md index a685078bc..11de64353 100644 --- a/docs/admin/integrations/servicenow_setup.md +++ b/docs/admin/integrations/servicenow_setup.md @@ -81,10 +81,3 @@ PLUGINS_CONFIG = { !!! note Configuration keys are prefixed with `servicenow_`. -## Why did my job fail with an `IncompleteJSONError`? - -``` -An exception occurred: `IncompleteJSONError: lexical error: invalid char in json text. Date: Wed, 26 Jun 2024 14:28:38 +0100 Subject: [PATCH 173/229] Update changes/442.changed Co-authored-by: Adam Byczkowski <38091261+qduk@users.noreply.github.com> --- changes/442.changed | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changes/442.changed b/changes/442.changed index e4f1de75f..c531bc690 100644 --- a/changes/442.changed +++ b/changes/442.changed @@ -1,4 +1,4 @@ Infoblox integration - configuration settings are now defined in the instances of the SSOTInfobloxConfig model. -Infoblox integration - functionality provided by the `infoblox_import_subnets` setings has been replaced with the `infoblox_sync_filters` field in the SSOTInfobloxConfig instance. +Infoblox integration - functionality provided by the `infoblox_import_subnets` settings has been replaced with the `infoblox_sync_filters` field in the SSOTInfobloxConfig instance. Infoblox integration - updated Infoblox client methods to support Network View. Infoblox integration - standardized `JSONDecoderError` handling in the Infoblox client. From 94ab9a8b9f763c31a1c02a2e63031fddcdc15a90 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Wed, 26 Jun 2024 14:28:51 +0100 Subject: [PATCH 174/229] Update docs/admin/integrations/infoblox_setup.md Co-authored-by: Adam Byczkowski <38091261+qduk@users.noreply.github.com> --- docs/admin/integrations/infoblox_setup.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/admin/integrations/infoblox_setup.md b/docs/admin/integrations/infoblox_setup.md index 4d76bb98c..20088123b 100644 --- a/docs/admin/integrations/infoblox_setup.md +++ b/docs/admin/integrations/infoblox_setup.md @@ -71,7 +71,7 @@ Infoblox Sync Filters is a mandatory setting used to control the scope of the IP ] ``` -This default value specifies that all IPv4 and IPv6 objects located in Infoblox "default" Network View or Nautobot "Global" Namespace, will loaded for comparison and considered for synchronization. +This default value specifies that all IPv4 and IPv6 objects located in Infoblox "default" Network View or Nautobot "Global" Namespace, will be loaded for comparison and considered for synchronization. Infoblox Sync Filters can contain multiple entries. Each entry is a dictionary with one mandatory key `network_view` and two optional keys `prefixes_ipv4` and `prefixes_ipv6`. From 8d9807aa219391089d8aa9957e68bd7baa64f0f9 Mon Sep 17 00:00:00 2001 From: Przemek Rogala Date: Wed, 26 Jun 2024 15:16:43 +0100 Subject: [PATCH 175/229] Add label for fixed address type form field. 
--- nautobot_ssot/integrations/infoblox/forms.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nautobot_ssot/integrations/infoblox/forms.py b/nautobot_ssot/integrations/infoblox/forms.py index f10733e60..c194cb18c 100644 --- a/nautobot_ssot/integrations/infoblox/forms.py +++ b/nautobot_ssot/integrations/infoblox/forms.py @@ -34,6 +34,7 @@ class SSOTInfobloxConfigForm(NautobotModelForm): # pylint: disable=too-many-anc fixed_address_type = forms.ChoiceField( choices=FixedAddressTypeChoices, required=True, + label="Fixed Address type", widget=StaticSelect2(), ) dns_record_type = forms.ChoiceField( From 54b8e6efc0035fc3e44dc684789148a396a8f73b Mon Sep 17 00:00:00 2001 From: jtdub Date: Sat, 13 Apr 2024 23:03:35 -0500 Subject: [PATCH 176/229] Rebase --- development/development.env | 2 + development/nautobot_config.py | 15 ++--- nautobot_ssot/__init__.py | 1 + .../integrations/itential/__init__.py | 1 + .../integrations/itential/api/__init__.py | 1 + .../integrations/itential/api/serializers.py | 19 +++++++ .../integrations/itential/filters.py | 17 ++++++ nautobot_ssot/integrations/itential/forms.py | 43 ++++++++++++++ nautobot_ssot/integrations/itential/models.py | 49 ++++++++++++++++ nautobot_ssot/integrations/itential/tables.py | 25 +++++++++ nautobot_ssot/integrations/itential/urls.py | 10 ++++ nautobot_ssot/integrations/itential/views.py | 17 ++++++ .../migrations/0009_automationgatewaymodel.py | 56 +++++++++++++++++++ nautobot_ssot/models.py | 2 + 14 files changed, 251 insertions(+), 7 deletions(-) create mode 100644 nautobot_ssot/integrations/itential/__init__.py create mode 100644 nautobot_ssot/integrations/itential/api/__init__.py create mode 100644 nautobot_ssot/integrations/itential/api/serializers.py create mode 100644 nautobot_ssot/integrations/itential/filters.py create mode 100644 nautobot_ssot/integrations/itential/forms.py create mode 100644 nautobot_ssot/integrations/itential/models.py create mode 100644 nautobot_ssot/integrations/itential/tables.py create mode 100644 nautobot_ssot/integrations/itential/urls.py create mode 100644 nautobot_ssot/integrations/itential/views.py create mode 100644 nautobot_ssot/migrations/0009_automationgatewaymodel.py diff --git a/development/development.env b/development/development.env index 8f23d010e..930f62db5 100644 --- a/development/development.env +++ b/development/development.env @@ -96,3 +96,5 @@ NAUTOBOT_SSOT_ENABLE_IPFABRIC="False" IPFABRIC_HOST="https://ipfabric.example.com" IPFABRIC_SSL_VERIFY="True" IPFABRIC_TIMEOUT=15 + +NAUTOBOT_SSOT_ENABLE_ITENTIAL="True" diff --git a/development/nautobot_config.py b/development/nautobot_config.py index b33319dc8..ff9e7973e 100644 --- a/development/nautobot_config.py +++ b/development/nautobot_config.py @@ -191,13 +191,14 @@ "atl01": "Atlanta", }, "aristacv_verify": is_truthy(os.getenv("NAUTOBOT_ARISTACV_VERIFY", True)), - "enable_aci": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_ACI", "false")), - "enable_aristacv": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_ARISTACV", "false")), - "enable_device42": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_DEVICE42", "false")), - "enable_infoblox": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_INFOBLOX", "false")), - "enable_ipfabric": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_IPFABRIC", "false")), - "enable_servicenow": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_SERVICENOW", "false")), - "hide_example_jobs": is_truthy(os.getenv("NAUTOBOT_SSOT_HIDE_EXAMPLE_JOBS", "true")), + "enable_aci": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_ACI")), + "enable_aristacv": 
is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_ARISTACV")), + "enable_device42": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_DEVICE42")), + "enable_infoblox": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_INFOBLOX")), + "enable_ipfabric": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_IPFABRIC")), + "enable_itential": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_ITENTIAL")), + "enable_servicenow": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_SERVICENOW")), + "hide_example_jobs": is_truthy(os.getenv("NAUTOBOT_SSOT_HIDE_EXAMPLE_JOBS")), "device42_host": os.getenv("NAUTOBOT_SSOT_DEVICE42_HOST", ""), "device42_username": os.getenv("NAUTOBOT_SSOT_DEVICE42_USERNAME", ""), "device42_password": os.getenv("NAUTOBOT_SSOT_DEVICE42_PASSWORD", ""), diff --git a/nautobot_ssot/__init__.py b/nautobot_ssot/__init__.py index 7c3d62f52..12e735e8d 100644 --- a/nautobot_ssot/__init__.py +++ b/nautobot_ssot/__init__.py @@ -118,6 +118,7 @@ class NautobotSSOTAppConfig(NautobotAppConfig): "enable_infoblox": False, "enable_ipfabric": False, "enable_servicenow": False, + "enable_itential": False, "hide_example_jobs": True, "ipfabric_api_token": "", "ipfabric_host": "", diff --git a/nautobot_ssot/integrations/itential/__init__.py b/nautobot_ssot/integrations/itential/__init__.py new file mode 100644 index 000000000..fcdfba3e8 --- /dev/null +++ b/nautobot_ssot/integrations/itential/__init__.py @@ -0,0 +1 @@ +"""Itential SSoT.""" diff --git a/nautobot_ssot/integrations/itential/api/__init__.py b/nautobot_ssot/integrations/itential/api/__init__.py new file mode 100644 index 000000000..0e72a39c0 --- /dev/null +++ b/nautobot_ssot/integrations/itential/api/__init__.py @@ -0,0 +1 @@ +"""Itential SSoT Api.""" diff --git a/nautobot_ssot/integrations/itential/api/serializers.py b/nautobot_ssot/integrations/itential/api/serializers.py new file mode 100644 index 000000000..a5d0b9e6c --- /dev/null +++ b/nautobot_ssot/integrations/itential/api/serializers.py @@ -0,0 +1,19 @@ +"""Itential SSoT serializers.""" + +from rest_framework import serializers + +from nautobot.apps.api import NautobotModelSerializer + +from nautobot_ssot.integrations.itential import models + + +class AutomationGatewayModelSerializer(NautobotModelSerializer): + """AutomationGatewayModel serializer.""" + + url = serializers.HyperlinkedIdentityField(view_name="plugins-api:nautobot_ssot-api:automationgatewaymodel-detail") + + class Meta: + """Meta class definition.""" + + model = models.AutomationGatewayModel + fields = "__all__" diff --git a/nautobot_ssot/integrations/itential/filters.py b/nautobot_ssot/integrations/itential/filters.py new file mode 100644 index 000000000..64547fc84 --- /dev/null +++ b/nautobot_ssot/integrations/itential/filters.py @@ -0,0 +1,17 @@ +"""Itential SSoT Filters.""" + +from nautobot.apps.filters import BaseFilterSet, SearchFilter + +from nautobot_ssot.integrations.itential import models + + +class AutomationGatewayModelFilterSet(BaseFilterSet): + """AutomationGatewayModel FilterSet.""" + + q = SearchFilter(filter_predicates={"name": "icontains"}) + + class Meta: + """Meta class definition.""" + + model = models.AutomationGatewayModel + fields = ["name"] diff --git a/nautobot_ssot/integrations/itential/forms.py b/nautobot_ssot/integrations/itential/forms.py new file mode 100644 index 000000000..46d326f7d --- /dev/null +++ b/nautobot_ssot/integrations/itential/forms.py @@ -0,0 +1,43 @@ +"""Itential SSoT Forms.""" + +from django import forms + +from nautobot.apps.forms import BootstrapMixin, BulkEditForm, NautobotModelForm + +from 
nautobot_ssot.integrations.itential import models + + +class AutomationGatewayModelBulkEditForm(BootstrapMixin, BulkEditForm): + """AutomationGatewayModel BulkEdit form.""" + + pk = forms.ModelMultipleChoiceField( + queryset=models.AutomationGatewayModel.objects.all(), widget=forms.MultipleHiddenInput + ) + enabled = forms.BooleanField(required=False) + + class Meta: + """Meta class definition.""" + + nullable_fields = [] + + +class AutomationGatewayModelFilterForm(BootstrapMixin, forms.Form): + """AutomationGatewayModel filter form.""" + + class Meta: + """Meta class definition.""" + + model = models.AutomationGatewayModel + q = forms.CharField(required=False, label="Search") + name = forms.CharField(required=False) + enabled = forms.BooleanField(required=False) + + +class AutomationGatewayModelForm(NautobotModelForm): + """AutomationGatewayModel create/edit form.""" + + class Meta: + """Meta class definition.""" + + model = models.AutomationGatewayModel + fields = ["name", "description", "location", "location_descendants", "gateway", "enabled"] diff --git a/nautobot_ssot/integrations/itential/models.py b/nautobot_ssot/integrations/itential/models.py new file mode 100644 index 000000000..62f994e00 --- /dev/null +++ b/nautobot_ssot/integrations/itential/models.py @@ -0,0 +1,49 @@ +"""Models for Nautobot Itential.""" + +# Django imports +from django.db import models + +# Nautobot imports +from nautobot.apps.models import PrimaryModel +from nautobot.dcim.models import Location +from nautobot.extras.models import ExternalIntegration + + +class AutomationGatewayModel(PrimaryModel):  # pylint: disable=too-many-ancestors + """Automation Gateway model for Nautobot Itential app.""" + + name = models.CharField(max_length=255, unique=True) + description = models.CharField(max_length=512, blank=True) + location = models.ForeignKey( + Location, + on_delete=models.CASCADE, + verbose_name="Location", + help_text="Automation Gateway manages devices from this location.", + ) + location_descendants = models.BooleanField( + default=True, + verbose_name="Include Location Descendants", + help_text="Include descendant locations.", + ) + gateway = models.OneToOneField( + ExternalIntegration, + on_delete=models.CASCADE, + verbose_name="Automation Gateway", + help_text="Automation Gateway server defined from external integration model.", + ) + enabled = models.BooleanField( + default=False, + verbose_name="Automation Gateway enabled", + help_text="Enable or disable management of this Automation Gateway by Nautobot.", + ) + + class Meta: + """Meta class.""" + + ordering = ["name", "location"] + verbose_name = "Automation Gateway Management" + verbose_name_plural = "Automation Gateway Management" + + def __str__(self): + """Stringify instance.""" + return self.name diff --git a/nautobot_ssot/integrations/itential/tables.py b/nautobot_ssot/integrations/itential/tables.py new file mode 100644 index 000000000..a8c3f9ba2 --- /dev/null +++ b/nautobot_ssot/integrations/itential/tables.py @@ -0,0 +1,25 @@ +"""Itential SSoT tables.""" + +import django_tables2 as tables + +from nautobot.apps.tables import ( + BaseTable, + ButtonsColumn, + ToggleColumn, +) + +from nautobot_ssot.integrations.itential import models + + +class AutomationGatewayModelTable(BaseTable): + """AutomationGatewayModel Table.""" + + pk = ToggleColumn() + name = tables.LinkColumn() + actions = ButtonsColumn(models.AutomationGatewayModel) + + class Meta: + """Meta class definition.""" + + model = models.AutomationGatewayModel + fields = ["name", 
"description", "location", "location_descendants", "gateway", "enabled"] diff --git a/nautobot_ssot/integrations/itential/urls.py b/nautobot_ssot/integrations/itential/urls.py new file mode 100644 index 000000000..389495c05 --- /dev/null +++ b/nautobot_ssot/integrations/itential/urls.py @@ -0,0 +1,10 @@ +"""Itential SSoT URL's.""" + +from nautobot.apps.urls import NautobotUIViewSetRouter + +from nautobot_ssot.integrations.itential import views + +router = NautobotUIViewSetRouter() +router.register("itential/automation-gateway", views.AutomationGatewayModelUIViewSet) + +urlpatterns = router.urls diff --git a/nautobot_ssot/integrations/itential/views.py b/nautobot_ssot/integrations/itential/views.py new file mode 100644 index 000000000..cfaf63759 --- /dev/null +++ b/nautobot_ssot/integrations/itential/views.py @@ -0,0 +1,17 @@ +"""Itential SSoT Views.""" + +from nautobot.apps import views +from nautobot_ssot.integrations.itential import forms, filters, tables, models +from nautobot_ssot.integrations.itential.api import serializers + + +class AutomationGatewayModelUIViewSet(views.NautobotUIViewSet): + """Automation Gateway Model UI ViewSet class.""" + + bulk_update_form_class = forms.AutomationGatewayModelBulkEditForm + filterset_class = filters.AutomationGatewayModelFilterSet + filterset_form_class = forms.AutomationGatewayModelFilterForm + form_class = forms.AutomationGatewayModelForm + queryset = models.AutomationGatewayModel.objects.all() + serializer_class = serializers.AutomationGatewayModelSerializer + table_class = tables.AutomationGatewayModelTable diff --git a/nautobot_ssot/migrations/0009_automationgatewaymodel.py b/nautobot_ssot/migrations/0009_automationgatewaymodel.py new file mode 100644 index 000000000..c6e64e9d8 --- /dev/null +++ b/nautobot_ssot/migrations/0009_automationgatewaymodel.py @@ -0,0 +1,56 @@ +# Generated by Django 3.2.23 on 2024-04-14 02:48 + +import django.core.serializers.json +from django.db import migrations, models +import django.db.models.deletion +import nautobot.core.models.fields +import nautobot.extras.models.mixins +import uuid + + +class Migration(migrations.Migration): + dependencies = [ + ("dcim", "0052_fix_interface_redundancy_group_created"), + ("extras", "0102_set_null_objectchange_contenttype"), + ("nautobot_ssot", "0008_auto_20240110_1019"), + ] + + operations = [ + migrations.CreateModel( + name="AutomationGatewayModel", + fields=[ + ( + "id", + models.UUIDField( + default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True + ), + ), + ("created", models.DateTimeField(auto_now_add=True, null=True)), + ("last_updated", models.DateTimeField(auto_now=True, null=True)), + ( + "_custom_field_data", + models.JSONField(blank=True, default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder), + ), + ("name", models.CharField(max_length=255, unique=True)), + ("description", models.CharField(blank=True, max_length=512)), + ("location_descendants", models.BooleanField(default=True)), + ("enabled", models.BooleanField(default=False)), + ( + "gateway", + models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to="extras.externalintegration"), + ), + ("location", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="dcim.location")), + ("tags", nautobot.core.models.fields.TagsField(through="extras.TaggedItem", to="extras.Tag")), + ], + options={ + "verbose_name": "Automation Gateway Management", + "verbose_name_plural": "Automation Gateway Management", + "ordering": ["name", "location"], + }, + bases=( + 
models.Model, + nautobot.extras.models.mixins.DynamicGroupMixin, + nautobot.extras.models.mixins.NotesMixin, + ), + ), + ] diff --git a/nautobot_ssot/models.py b/nautobot_ssot/models.py index 8b28c34bc..e4c7810dd 100644 --- a/nautobot_ssot/models.py +++ b/nautobot_ssot/models.py @@ -36,6 +36,7 @@ from nautobot_ssot.integrations.servicenow.models import SSOTServiceNowConfig from nautobot_ssot.integrations.infoblox.models import SSOTInfobloxConfig +from nautobot_ssot.integrations.itential.models import AutomationGatewayModel from .choices import SyncLogEntryActionChoices, SyncLogEntryStatusChoices @@ -218,6 +219,7 @@ class Meta: __all__ = ( "SSOTInfobloxConfig", + "AutomationGatewayModel", "SSOTServiceNowConfig", "Sync", "SyncLogEntry", From fed19570f340322181a0c59d763879b3b7898588 Mon Sep 17 00:00:00 2001 From: jtdub Date: Sun, 14 Apr 2024 12:58:57 -0500 Subject: [PATCH 177/229] add itential navigation and api endpoints --- nautobot_ssot/integrations/itential/api/urls.py | 10 ++++++++++ nautobot_ssot/integrations/itential/api/views.py | 14 ++++++++++++++ nautobot_ssot/integrations/itential/navigation.py | 12 ++++++++++++ nautobot_ssot/navigation.py | 10 ++++++++++ 4 files changed, 46 insertions(+) create mode 100644 nautobot_ssot/integrations/itential/api/urls.py create mode 100644 nautobot_ssot/integrations/itential/api/views.py create mode 100644 nautobot_ssot/integrations/itential/navigation.py diff --git a/nautobot_ssot/integrations/itential/api/urls.py b/nautobot_ssot/integrations/itential/api/urls.py new file mode 100644 index 000000000..8c191038f --- /dev/null +++ b/nautobot_ssot/integrations/itential/api/urls.py @@ -0,0 +1,10 @@ +"""Itential SSoT API URLs.""" + +from nautobot.apps.api import OrderedDefaultRouter +from nautobot_ssot.integrations.itential.api import views + + +router = OrderedDefaultRouter(view_name="Itential SSoT") +router.register("models", views.AutomationGatewayModelViewSet) + +urlpatterns = router.urls diff --git a/nautobot_ssot/integrations/itential/api/views.py b/nautobot_ssot/integrations/itential/api/views.py new file mode 100644 index 000000000..34101dc7c --- /dev/null +++ b/nautobot_ssot/integrations/itential/api/views.py @@ -0,0 +1,14 @@ +"""Itential SSoT API Views.""" + +from nautobot.apps.api import NautobotModelViewSet + +from nautobot_ssot.integrations.itential import models, filters +from nautobot_ssot.integrations.itential.api import serializers + + +class AutomationGatewayModelViewSet(NautobotModelViewSet): + """AutomationGatewayModel API ViewSet.""" + + queryset = models.AutomationGatewayModel.objects.all() + serializer_class = serializers.AutomationGatewayModelSerializer + filterset_class = filters.AutomationGatewayModelFilterSet diff --git a/nautobot_ssot/integrations/itential/navigation.py b/nautobot_ssot/integrations/itential/navigation.py new file mode 100644 index 000000000..63083e747 --- /dev/null +++ b/nautobot_ssot/integrations/itential/navigation.py @@ -0,0 +1,12 @@ +"""Itential SSoT Navigation.""" + +from nautobot.apps.ui import NavMenuGroup, NavMenuItem, NavMenuTab + + +nav_items = [ + NavMenuItem( + link="plugins:nautobot_ssot:automationgatewaymodel_list", + name="Itential Automation Gateway", + permissions=["nautobot_ssot.view_sync"], + ), +] diff --git a/nautobot_ssot/navigation.py b/nautobot_ssot/navigation.py index 398e493f8..ac686903e 100644 --- a/nautobot_ssot/navigation.py +++ b/nautobot_ssot/navigation.py @@ -1,6 +1,7 @@ """App additions to the Nautobot navigation menu.""" from nautobot.apps.ui import NavMenuGroup, 
NavMenuItem, NavMenuTab +from .integrations.utils import each_enabled_integration_module items = [ @@ -21,6 +22,15 @@ ), ] + +def _add_integrations(): + for module in each_enabled_integration_module("navigation"): + items.extend(module.nav_items) + + +_add_integrations() + + menu_items = ( NavMenuTab( name="Plugins", From 7e1325b8d1e6d34e68535c18cb3331f7e0a243db Mon Sep 17 00:00:00 2001 From: jtdub Date: Sun, 14 Apr 2024 14:18:00 -0500 Subject: [PATCH 178/229] Update api client --- .../integrations/itential/clients.py | 271 ++++++++++++++++++ nautobot_ssot/tests/itential/__init__.py | 0 .../tests/itential/fixtures/__init__.py | 1 + .../tests/itential/fixtures/gateways.py | 17 ++ nautobot_ssot/tests/itential/test_clients.py | 65 +++++ 5 files changed, 354 insertions(+) create mode 100644 nautobot_ssot/integrations/itential/clients.py create mode 100644 nautobot_ssot/tests/itential/__init__.py create mode 100644 nautobot_ssot/tests/itential/fixtures/__init__.py create mode 100644 nautobot_ssot/tests/itential/fixtures/gateways.py create mode 100644 nautobot_ssot/tests/itential/test_clients.py diff --git a/nautobot_ssot/integrations/itential/clients.py b/nautobot_ssot/integrations/itential/clients.py new file mode 100644 index 000000000..fe019aea5 --- /dev/null +++ b/nautobot_ssot/integrations/itential/clients.py @@ -0,0 +1,271 @@ +"""Itential SSoT API Clients.""" + +import requests + +from typing import List, Optional, Union + + +class AutomationGatewayClient: + """Itential Automation Gateway API Client.""" + + def __init__( + self, + host: str, + username: str, + password: str, + job: object, + port: Optional[int] = 8443, + https_enabled: Optional[bool] = True, + verify_ssl: Optional[bool] = True, + api_version: Optional[str] = "2.0", + ): + """Initialize the API client. + + Args: + host (str): Hostname or IP address of automation gateway. + username (str): Username. + password (str): Password. + job (object): Job object. + port (Optional[int], optional): TCP port to connect to. Defaults to 8443. + https_enabled (Optional[bool], optional): Enable or disable HTTPS. Defaults to True. + verify_ssl (Optional[bool], optional): Enable or disable verification of SSL. Defaults to True. + api_version (Optional[str], optional): Automation Gateway API version. 
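+ + Example (illustrative sketch only; the host, credentials, and job values are placeholders): + >>> client = AutomationGatewayClient(host="iag.example.com", username="admin", password="secret", job=job) + >>> client.login()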
+ """ + self.host = host + self.username = username + self.password = password + self.job = job + self.port = port + self.protocol = "https" if self.https_enabled else "http" + self.verify_ssl = verify_ssl + self.api_version = api_version + self.session = requests.Session() + self.cookie = {} + + def __enter__(self): + """Context manager setup.""" + self.login() + return + + def __exit__(self, exc_type, exc_value, traceback): + """Context manager teardown.""" + self.logout() + + @property + def base_url(self): + """Build base URL.""" + return f"{self.protocol}://{self.host}:{str(self.port)}/api/{self.api_version}" + + def _get(self, uri: str) -> requests.Response: + """Perform a GET request to the specified uri.""" + response = self.session.get(f"{self.base_url}/{uri}", verify_ssl=self.verify_ssl) + return response + + def _post(self, uri: str, json_data: Optional[dict] = None) -> requests.Response: + """Perform a POST request to the specified uri.""" + if json_data: + response = self.session.post(f"{self.base_url}/{uri}", json=json_data, verify_ssl=self.verify_ssl) + else: + response = self.session.post(f"{self.base_url}/{uri}", verify_ssl=self.verify_ssl) + return response + + def _put(self, uri: str, json_data: Optional[dict] = None) -> requests.Response: + """Perform a PUT request to the specified uri.""" + if json_data: + response = self.session.put(f"{self.base_url}/{uri}", json=json_data, verify_ssl=self.verify_ssl) + else: + response = self.session.put(f"{self.base_url}/{uri}", verify_ssl=self.verify_ssl) + return response + + def _delete(self, uri: str) -> requests.Response: + """Perform a GET request to the specified uri.""" + response = self.session.delete(f"{self.base_url}/{uri}", verify_ssl=self.verify_ssl) + return response + + def login(self) -> Union[requests.Response, requests.HTTPError]: + """Login to Automation Gateway.""" + response = self._post(uri="login", json_data={"username": self.username, "password": self.password}) + + if response.ok: + self.job.log_info(message=f"Logging into {self.host}.") + self.cookie = {"AutomationGatewayToken": response.json()["token"]} + self.session.headers.update(self.cookie) + return response + self.job.log_warning(message=f"Failed to login to {self.host}.") + return response.raise_for_status() + + def logout(self) -> Union[requests.Response, requests.HTTPError]: + """Logout of Automation Gateway.""" + response = self._post(uri="logout") + if response.ok: + self.job.log_info(message=f"Logging out of {self.host}.") + return response + self.job.log_warning(message=f"Failed logging out of {self.host}.") + return response.raise_for_status() + + def status(self) -> Union[requests.Response, requests.HTTPError]: + """Get Automation Gateway status.""" + response = self._get(uri="poll") + if response.ok: + self.job.log_info(message=f"{self.host} polling is successful.") + return response + self.job.log_warning(message=f"Failed to poll {self.host}.") + return response.raise_for_status() + + def get_devices(self) -> Union[requests.Response, requests.HTTPError]: + """Get a devices.""" + response = self._get(uri="devices") + if response.ok: + self.job.log_info(message=f"Pulling devices from {self.host}.") + return response + self.job.log_warning(message=f"Failed pulling devices from {self.host}.") + return response.raise_for_status() + + def get_device(self, device_name: str) -> Union[requests.Response, requests.HTTPError]: + """Get a device object. + + Args: + device_name (str): Device name. + + Returns: + dict: The device and its attributes. 
+ """ + response = self._get(uri=f"devices/{device_name}") + if response.ok: + self.job.log_info(message=f"Pulling {device_name} from {self.host}.") + return response + self.job.log_warning(message=f"Failed pulling {device_name} from {self.host}.") + return response.raise_for_status() + + def create_device( + self, device_name: str, variables: Optional[dict] + ) -> Union[requests.Response, requests.HTTPError]: + """Create a device with attributes. + + Args: + device_name (str): Device name. + variables (dict, optional): Device attributes. Defaults to {}. + + Returns: + dict: API client return message. + """ + payload = {"name": device_name, "variables": variables} + response = self._post(uri=f"devices", json_data=payload) + if response.ok: + self.job.log_info(message=f"Creating {device_name} on {self.host}.") + return response + self.job.log_warning(message=f"Failed to create {device_name} on {self.host}.") + return response.raise_for_status() + + def update_device( + self, device_name: str, variables: Optional[dict] + ) -> Union[requests.Response, requests.HTTPError]: + """Update a device with attributes. + + Args: + device_name (str): Device name. + variables (dict, optional): Device attributes. Defaults to {}. + + Returns: + dict: API client return message. + """ + payload = {"name": device_name, "variables": variables} + response = self._put(uri=f"devices", json_data=payload) + if response.ok: + self.job.log_info(message=f"Updating {device_name} on {self.host}.") + return response + self.job.log_warning(message=f"Failed to update {device_name} on {self.host}.") + return response.raise_for_status() + + def delete_device(self, device_name: str) -> Union[requests.Response, requests.HTTPError]: + """Delete a device. + + Args: + device_name (str): Device name. + + Returns: + dict: API client return message. + """ + response = self._delete(uri=f"devices/{device_name}") + if response.ok: + self.job.log_info(message=f"Deleting {device_name} on {self.host}.") + return response + self.job.log_warning(message=f"Failed to delete {device_name} on {self.host}.") + return response.raise_for_status() + + def get_groups(self) -> List[str]: + """Get a groups.""" + response = self._get(uri="groups") + if response.ok: + self.job.log_info(message=f"Pulling groups from {self.host}.") + return response + self.job.log_warning(message=f"Failed pulling groups from {self.host}.") + return response.raise_for_status() + + def get_group(self, group_name: str) -> Union[requests.Response, requests.HTTPError]: + """Get a group object. + + Args: + group_name (str): group name. + + Returns: + dict: The group and its attributes. + """ + response = self._get(uri=f"groups/{group_name}") + if response.ok: + self.job.log_info(message=f"Pulling {group_name} from {self.host}.") + return response + self.job.log_warning(message=f"Failed pulling {group_name} from {self.host}.") + return response.raise_for_status() + + def create_group(self, group_name: str, variables: Optional[dict]) -> Union[requests.Response, requests.HTTPError]: + """Create a group with attributes. + + Args: + group_name (str): group name. + variables (dict, optional): group attributes. Defaults to {}. + + Returns: + dict: API client return message. 
+ """ + payload = {"name": group_name, "variables": variables} + response = self._post(uri=f"groups", json_data=payload) + if response.ok: + self.job.log_info(message=f"Creating {group_name} on {self.host}.") + return response + self.job.log_warning(message=f"Failed to create {group_name} on {self.host}.") + return response.raise_for_status() + + def update_group(self, group_name: str, variables: Optional[dict]) -> Union[requests.Response, requests.HTTPError]: + """Update a group with attributes. + + Args: + group_name (str): group name. + variables (dict, optional): group attributes. Defaults to {}. + + Returns: + dict: API client return message. + """ + payload = {"name": group_name, "variables": variables} + response = self._put(uri=f"groups", json_data=payload) + if response.ok: + self.job.log_info(message=f"Updating {group_name} on {self.host}.") + return response + self.job.log_warning(message=f"Failed to update {group_name} on {self.host}.") + return response.raise_for_status() + + def delete_group(self, group_name: str) -> Union[requests.Response, requests.HTTPError]: + """Delete a group. + + Args: + group_name (str): group name. + + Returns: + dict: API client return message. + """ + response = self._delete(uri=f"groups/{group_name}") + if response.ok: + self.job.log_info(message=f"Deleting {group_name} on {self.host}.") + return response + self.job.log_warning(message=f"Failed to delete {group_name} on {self.host}.") + return response.raise_for_status() diff --git a/nautobot_ssot/tests/itential/__init__.py b/nautobot_ssot/tests/itential/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/nautobot_ssot/tests/itential/fixtures/__init__.py b/nautobot_ssot/tests/itential/fixtures/__init__.py new file mode 100644 index 000000000..ab70fe894 --- /dev/null +++ b/nautobot_ssot/tests/itential/fixtures/__init__.py @@ -0,0 +1 @@ +"""Itential SSoT Fixtures.""" diff --git a/nautobot_ssot/tests/itential/fixtures/gateways.py b/nautobot_ssot/tests/itential/fixtures/gateways.py new file mode 100644 index 000000000..8fe544fba --- /dev/null +++ b/nautobot_ssot/tests/itential/fixtures/gateways.py @@ -0,0 +1,17 @@ +"""Itential Automation Gateway Fixtures.""" + + +data = [ + { + "name": "IAG1", + "description": "Test IAG 1", + "location": "NYC", + "gateway": "iag1.example.com", + "enabled": True, + }, +] + + +def update_or_create_automation_gateways(): + """Fixture to populate Automation Gateways.""" + pass diff --git a/nautobot_ssot/tests/itential/test_clients.py b/nautobot_ssot/tests/itential/test_clients.py new file mode 100644 index 000000000..d674b50f8 --- /dev/null +++ b/nautobot_ssot/tests/itential/test_clients.py @@ -0,0 +1,65 @@ +"""Itential SSoT API Client Tests.""" + +from unittest import TestCase + +from nautobot_ssot.tests.itential.fixtures import gateways + + +class AutomationGatewayClientTestCase(TestCase): + """Itential Automation Gateway Client Test Cases.""" + + def setUp(self): + """Setup test cases.""" + pass + + def test_login(self): + """Test API client login.""" + pass + + def test_logout(self): + """Test API client logout.""" + pass + + def test_get_devices(self): + """Test get_devices.""" + pass + + def test_get_device(self): + """Test get_device.""" + pass + + def test_create_device(self): + """Test create_device.""" + pass + + def test_update_device(self): + """Test update_device.""" + pass + + def test_delete_device(self): + """Test delete_device.""" + pass + + def test_get_groups(self): + """Test get_groups.""" + pass + + def test_get_group(self): + 
"""Test get_group.""" + pass + + def test_create_group(self): + """Test create_group.""" + pass + + def test_update_group(self): + """Test update_group.""" + pass + + def test_delete_group(self): + """Test delete_group.""" + pass + + def tearDown(self): + """Teardown test cases.""" + pass From 9359a7691da75c841c5107e58af4be80281cf7f4 Mon Sep 17 00:00:00 2001 From: jtdub Date: Sun, 14 Apr 2024 17:57:27 -0500 Subject: [PATCH 179/229] add test_client fixtures --- .../tests/itential/fixtures/gateways.py | 101 +++++++++++++++++- nautobot_ssot/tests/itential/fixtures/urls.py | 1 + nautobot_ssot/tests/itential/test_clients.py | 17 ++- 3 files changed, 115 insertions(+), 4 deletions(-) create mode 100644 nautobot_ssot/tests/itential/fixtures/urls.py diff --git a/nautobot_ssot/tests/itential/fixtures/gateways.py b/nautobot_ssot/tests/itential/fixtures/gateways.py index 8fe544fba..290e2d295 100644 --- a/nautobot_ssot/tests/itential/fixtures/gateways.py +++ b/nautobot_ssot/tests/itential/fixtures/gateways.py @@ -1,17 +1,112 @@ """Itential Automation Gateway Fixtures.""" +from nautobot.extras.models import Secret, SecretsGroup, SecretsGroupAssociation, ExternalIntegration, Status +from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices +from nautobot.dcim.models import LocationType, Location -data = [ +from nautobot_ssot.integrations.itential.models import AutomationGatewayModel + +gateways = [ { "name": "IAG1", "description": "Test IAG 1", + "region": "USA", "location": "NYC", "gateway": "iag1.example.com", "enabled": True, + "username_env": "IAG1_USERNAME", + "password_env": "IAG1_PASSWORD", + "secret_group": "testGroup1", + }, + { + "name": "IAG10", + "description": "Test IAG 10", + "region": "USA", + "location": "NYC", + "gateway": "iag10.example.com", + "enabled": False, + "username_env": "IAG1_USERNAME", + "password_env": "IAG1_PASSWORD", + "secret_group": "testGroup1", + }, + { + "name": "IAG2", + "description": "Test IAG 2", + "region": "Europe", + "location": "LON", + "gateway": "iag2.example.com", + "enabled": True, + "username_env": "IAG2_USERNAME", + "password_env": "IAG2_PASSWORD", + "secret_group": "testGroup2", }, ] +responses = {} + -def update_or_create_automation_gateways(): +def update_or_create_automation_gateways( + name: str, + description: str, + location: str, + region: str, + gateway: str, + enabled: bool, + username_env: str, + password_env: str, + secret_group: str, +): """Fixture to populate Automation Gateways.""" - pass + # Fetch the active status + status = Status.objects.get(name="Active") + + # Create a region location type + region_type, _ = LocationType.objects.update_or_create(name="Region") + + # Create a site location type + site_type, _ = LocationType.objects.update_or_create(name="Site", parent=region_type) + + # Create a region location + region, _ = Location.objects.update_or_create(name=region, location_type=region_type, status=status) + + # Create a location with the region as the parent + location, _ = Location.objects.update_or_create( + name=location, location_type=site_type, parent=region, status=status + ) + + # Create a REST username secret + secret_username, _ = Secret.objects.update_or_create( + name=username_env, provider="environment-variable", parameters={"variable": username_env} + ) + + # Create a REST password secret + secret_password, _ = Secret.objects.update_or_create( + name=password_env, provider="environment-variable", parameters={"variable": password_env} + ) + + # Create a secrets group + 
secret_group, _ = SecretsGroup.objects.update_or_create(name=secret_group) + + # Associate the REST username with the secrets group + username_assoc, _ = SecretsGroupAssociation.objects.update_or_create( + secrets_group=secret_group, + secret=secret_username, + access_type=SecretsGroupAccessTypeChoices.TYPE_REST, + secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME, + ) + + # Associate the REST password with the secrets group + password_assoc, _ = SecretsGroupAssociation.objects.update_or_create( + secrets_group=secret_group, + secret=secret_password, + access_type=SecretsGroupAccessTypeChoices.TYPE_REST, + secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD, + ) + + # Create the external integration + gateway, _ = ExternalIntegration.objects.update_or_create(name=name, remote_url=gateway, secrets_group=secret_group) + + # Create the Automation Gateway object + automation_gateway, _ = AutomationGatewayModel.objects.update_or_create( + name=name, description=description, location=region, gateway=gateway, enabled=enabled + ) diff --git a/nautobot_ssot/tests/itential/fixtures/urls.py b/nautobot_ssot/tests/itential/fixtures/urls.py new file mode 100644 index 000000000..13ae1258f --- /dev/null +++ b/nautobot_ssot/tests/itential/fixtures/urls.py @@ -0,0 +1 @@ +"""Itential SSoT URL fixtures.""" diff --git a/nautobot_ssot/tests/itential/test_clients.py b/nautobot_ssot/tests/itential/test_clients.py index d674b50f8..cfbc84ec4 100644 --- a/nautobot_ssot/tests/itential/test_clients.py +++ b/nautobot_ssot/tests/itential/test_clients.py @@ -1,5 +1,6 @@ """Itential SSoT API Client Tests.""" +import os from unittest import TestCase from nautobot_ssot.tests.itential.fixtures import gateways @@ -10,7 +11,21 @@ class AutomationGatewayClientTestCase(TestCase): def setUp(self): """Setup test cases.""" - pass + for device in gateways.gateways: + os.environ[device.get("username_env")] = "testUser" + os.environ[device.get("password_env")] = "testPass" + + gateways.update_or_create_automation_gateways( + name=device.get("name"), + description=device.get("description"), + location=device.get("location"), + region=device.get("region"), + gateway=device.get("gateway"), + enabled=device.get("enabled"), + username_env=device.get("username_env"), + password_env=device.get("password_env"), + secret_group=device.get("secret_group"), + ) def test_login(self): """Test API client login.""" From c3d0cb1315c9988170935988c09975a31b258e93 Mon Sep 17 00:00:00 2001 From: jtdub Date: Sun, 14 Apr 2024 18:01:56 -0500 Subject: [PATCH 180/229] update itential test names --- nautobot_ssot/tests/itential/test_clients.py | 24 ++++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/nautobot_ssot/tests/itential/test_clients.py b/nautobot_ssot/tests/itential/test_clients.py index cfbc84ec4..e4d752076 100644 --- a/nautobot_ssot/tests/itential/test_clients.py +++ b/nautobot_ssot/tests/itential/test_clients.py @@ -27,51 +27,51 @@ def setUp(self): secret_group=device.get("secret_group"), ) - def test_login(self): + def test_login__success(self): """Test API client login.""" pass - def test_logout(self): + def test_logout__success(self): """Test API client logout.""" pass - def test_get_devices(self): + def test_get_devices__success(self): """Test get_devices.""" pass - def test_get_device(self): + def test_get_device__success(self): """Test get_device.""" pass - def test_create_device(self): + def test_create_device__success(self): """Test create_device.""" pass - def test_update_device(self): + def 
test_update_device__success(self): """Test update_device.""" pass - def test_delete_device(self): + def test_delete_device__success(self): """Test delete_device.""" pass - def test_get_groups(self): + def test_get_groups__success(self): """Test get_groups.""" pass - def test_get_group(self): + def test_get_group__success(self): """Test get_group.""" pass - def test_create_group(self): + def test_create_group__success(self): """Test create_group.""" pass - def test_update_group(self): + def test_update_group__success(self): """Test update_group.""" pass - def test_delete_group(self): + def test_delete_group__success(self): """Test delete_group.""" pass From 9eba7bc86f5f38ab16bd56b766b61a1dc4b6daa8 Mon Sep 17 00:00:00 2001 From: jtdub Date: Sun, 14 Apr 2024 18:38:07 -0500 Subject: [PATCH 181/229] add job logger fixture --- .../tests/itential/fixtures/logger.py | 19 +++++++++++++++ nautobot_ssot/tests/itential/test_clients.py | 4 +++- 2 files changed, 22 insertions(+), 1 deletion(-) create mode 100644 nautobot_ssot/tests/itential/fixtures/logger.py diff --git a/nautobot_ssot/tests/itential/fixtures/logger.py b/nautobot_ssot/tests/itential/fixtures/logger.py new file mode 100644 index 000000000..6b5ec5e51 --- /dev/null +++ b/nautobot_ssot/tests/itential/fixtures/logger.py @@ -0,0 +1,19 @@ +"""Itential SSoT Job Logger fixtures.""" + +import logging + + +class JobLogger: + """Job Logger.""" + + def log_info(self, message: str): + """Info logging.""" + logging.info(message) + + def log_warning(self, message: str): + """Warning logging.""" + logging.warning(message) + + def log_failure(self, message: str): + """Failure logging.""" + logging.error(message) diff --git a/nautobot_ssot/tests/itential/test_clients.py b/nautobot_ssot/tests/itential/test_clients.py index e4d752076..4ed723165 100644 --- a/nautobot_ssot/tests/itential/test_clients.py +++ b/nautobot_ssot/tests/itential/test_clients.py @@ -3,7 +3,7 @@ import os from unittest import TestCase -from nautobot_ssot.tests.itential.fixtures import gateways +from nautobot_ssot.tests.itential.fixtures import gateways, logging @@ -11,6 +11,8 @@ class AutomationGatewayClientTestCase(TestCase): def setUp(self): """Setup test cases.""" + self.job = logging.JobLogger() + for device in gateways.gateways: os.environ[device.get("username_env")] = "testUser" os.environ[device.get("password_env")] = "testPass" From bd4d445eb534e7ec9e2c62957e8d01717f631056 Mon Sep 17 00:00:00 2001 From: jtdub Date: Sun, 14 Apr 2024 19:57:55 -0500 Subject: [PATCH 182/229] update test_clients --- .../integrations/itential/clients.py | 52 ++++++-------- .../tests/itential/fixtures/clients.py | 24 +++++++ .../tests/itential/fixtures/gateways.py | 27 +++++-- nautobot_ssot/tests/itential/fixtures/urls.py | 71 +++++++++++++++++++ nautobot_ssot/tests/itential/test_clients.py | 63 +++++++++++----- 5 files changed, 186 insertions(+), 51 deletions(-) create mode 100644 nautobot_ssot/tests/itential/fixtures/clients.py diff --git a/nautobot_ssot/integrations/itential/clients.py b/nautobot_ssot/integrations/itential/clients.py index fe019aea5..049b9fc58 100644 --- a/nautobot_ssot/integrations/itential/clients.py +++ b/nautobot_ssot/integrations/itential/clients.py @@ -14,10 +14,8 @@ def __init__( username: str, password: str, job: object, - port: Optional[int] = 8443, - https_enabled: Optional[bool] = True, verify_ssl: Optional[bool] = True, - api_version: Optional[str] = "2.0", + api_version: Optional[str] = "v2.0", ): """Initialize the API client. 
@@ -26,8 +24,6 @@ def __init__( username (str): Username. password (str): Password. job (object): Job object. - port (Optional[int], optional): TCP port to connect to. Defaults to 8443. - https_enabled (Optional[bool], optional): Enable or disable HTTPS. Defaults to True. verify_ssl (Optional[bool], optional): Enable or disable verification of SSL. Defaults to True. api_version (Optional[str], optional): Automation Gateway API version. """ @@ -35,8 +31,6 @@ def __init__( self.username = username self.password = password self.job = job - self.port = port - self.protocol = "https" if https_enabled else "http" self.verify_ssl = verify_ssl self.api_version = api_version self.session = requests.Session() self.cookie = {} @@ -54,32 +48,32 @@ def __exit__(self, exc_type, exc_value, traceback): @property def base_url(self): """Build base URL.""" - return f"{self.protocol}://{self.host}:{str(self.port)}/api/{self.api_version}" + return f"{self.host}/api/{self.api_version}" def _get(self, uri: str) -> requests.Response: """Perform a GET request to the specified uri.""" - response = self.session.get(f"{self.base_url}/{uri}", verify_ssl=self.verify_ssl) + response = self.session.get(f"{self.base_url}/{uri}", verify=self.verify_ssl) return response def _post(self, uri: str, json_data: Optional[dict] = None) -> requests.Response: """Perform a POST request to the specified uri.""" if json_data: - response = self.session.post(f"{self.base_url}/{uri}", json=json_data, verify_ssl=self.verify_ssl) + response = self.session.post(f"{self.base_url}/{uri}", json=json_data, verify=self.verify_ssl) else: - response = self.session.post(f"{self.base_url}/{uri}", verify_ssl=self.verify_ssl) + response = self.session.post(f"{self.base_url}/{uri}", verify=self.verify_ssl) return response def _put(self, uri: str, json_data: Optional[dict] = None) -> requests.Response: """Perform a PUT request to the specified uri.""" if json_data: - response = self.session.put(f"{self.base_url}/{uri}", json=json_data, verify_ssl=self.verify_ssl) + response = self.session.put(f"{self.base_url}/{uri}", json=json_data, verify=self.verify_ssl) else: - response = self.session.put(f"{self.base_url}/{uri}", verify_ssl=self.verify_ssl) + response = self.session.put(f"{self.base_url}/{uri}", verify=self.verify_ssl) return response def _delete(self, uri: str) -> requests.Response: """Perform a DELETE request to the specified uri.""" - response = self.session.delete(f"{self.base_url}/{uri}", verify_ssl=self.verify_ssl) + response = self.session.delete(f"{self.base_url}/{uri}", verify=self.verify_ssl) return response def login(self) -> Union[requests.Response, requests.HTTPError]: @@ -90,7 +84,7 @@ def login(self) -> Union[requests.Response, requests.HTTPError]: self.job.log_info(message=f"Logging into {self.host}.") self.cookie = {"AutomationGatewayToken": response.json()["token"]} self.session.headers.update(self.cookie) - return response + return response.json() self.job.log_warning(message=f"Failed to login to {self.host}.") return response.raise_for_status() @@ -99,7 +93,7 @@ def logout(self) -> Union[requests.Response, requests.HTTPError]: response = self._post(uri="logout") if response.ok: self.job.log_info(message=f"Logging out of {self.host}.") - return response + return response.json() self.job.log_warning(message=f"Failed logging out of {self.host}.") return response.raise_for_status() @@ -108,7 +102,7 @@ def status(self) -> Union[requests.Response, requests.HTTPError]: response = self._get(uri="poll") if response.ok: 
self.job.log_info(message=f"{self.host} polling is successful.") - return response + return response.json() self.job.log_warning(message=f"Failed to poll {self.host}.") return response.raise_for_status() @@ -117,7 +111,7 @@ def get_devices(self) -> Union[requests.Response, requests.HTTPError]: response = self._get(uri="devices") if response.ok: self.job.log_info(message=f"Pulling devices from {self.host}.") - return response + return response.json() self.job.log_warning(message=f"Failed pulling devices from {self.host}.") return response.raise_for_status() @@ -133,7 +127,7 @@ def get_device(self, device_name: str) -> Union[requests.Response, requests.HTTP response = self._get(uri=f"devices/{device_name}") if response.ok: self.job.log_info(message=f"Pulling {device_name} from {self.host}.") - return response + return response.json() self.job.log_warning(message=f"Failed pulling {device_name} from {self.host}.") return response.raise_for_status() @@ -153,7 +147,7 @@ def create_device( response = self._post(uri=f"devices", json_data=payload) if response.ok: self.job.log_info(message=f"Creating {device_name} on {self.host}.") - return response + return response.json() self.job.log_warning(message=f"Failed to create {device_name} on {self.host}.") return response.raise_for_status() @@ -170,10 +164,10 @@ def update_device( dict: API client return message. """ payload = {"name": device_name, "variables": variables} - response = self._put(uri=f"devices", json_data=payload) + response = self._put(uri=f"devices/{device_name}", json_data=payload) if response.ok: self.job.log_info(message=f"Updating {device_name} on {self.host}.") - return response + return response.json() self.job.log_warning(message=f"Failed to update {device_name} on {self.host}.") return response.raise_for_status() @@ -189,7 +183,7 @@ def delete_device(self, device_name: str) -> Union[requests.Response, requests.H response = self._delete(uri=f"devices/{device_name}") if response.ok: self.job.log_info(message=f"Deleting {device_name} on {self.host}.") - return response + return response.json() self.job.log_warning(message=f"Failed to delete {device_name} on {self.host}.") return response.raise_for_status() @@ -198,7 +192,7 @@ def get_groups(self) -> List[str]: response = self._get(uri="groups") if response.ok: self.job.log_info(message=f"Pulling groups from {self.host}.") - return response + return response.json() self.job.log_warning(message=f"Failed pulling groups from {self.host}.") return response.raise_for_status() @@ -214,7 +208,7 @@ def get_group(self, group_name: str) -> Union[requests.Response, requests.HTTPEr response = self._get(uri=f"groups/{group_name}") if response.ok: self.job.log_info(message=f"Pulling {group_name} from {self.host}.") - return response + return response.json() self.job.log_warning(message=f"Failed pulling {group_name} from {self.host}.") return response.raise_for_status() @@ -232,7 +226,7 @@ def create_group(self, group_name: str, variables: Optional[dict]) -> Union[requ response = self._post(uri=f"groups", json_data=payload) if response.ok: self.job.log_info(message=f"Creating {group_name} on {self.host}.") - return response + return response.json() self.job.log_warning(message=f"Failed to create {group_name} on {self.host}.") return response.raise_for_status() @@ -247,10 +241,10 @@ def update_group(self, group_name: str, variables: Optional[dict]) -> Union[requ dict: API client return message. 
""" payload = {"name": group_name, "variables": variables} - response = self._put(uri=f"groups", json_data=payload) + response = self._put(uri=f"groups/{group_name}", json_data=payload) if response.ok: self.job.log_info(message=f"Updating {group_name} on {self.host}.") - return response + return response.json() self.job.log_warning(message=f"Failed to update {group_name} on {self.host}.") return response.raise_for_status() @@ -266,6 +260,6 @@ def delete_group(self, group_name: str) -> Union[requests.Response, requests.HTT response = self._delete(uri=f"groups/{group_name}") if response.ok: self.job.log_info(message=f"Deleting {group_name} on {self.host}.") - return response + return response.json() self.job.log_warning(message=f"Failed to delete {group_name} on {self.host}.") return response.raise_for_status() diff --git a/nautobot_ssot/tests/itential/fixtures/clients.py b/nautobot_ssot/tests/itential/fixtures/clients.py new file mode 100644 index 000000000..51dc51fac --- /dev/null +++ b/nautobot_ssot/tests/itential/fixtures/clients.py @@ -0,0 +1,24 @@ +"""Itential SSoT API Clients fixtures.""" + +from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices + +from nautobot_ssot.tests.itential.fixtures import logger + +from nautobot_ssot.integrations.itential.models import AutomationGatewayModel +from nautobot_ssot.integrations.itential.clients import AutomationGatewayClient + + +def api_client(device_obj: AutomationGatewayModel, job: object = logger.JobLogger) -> AutomationGatewayClient: + """Initialize API Client.""" + + return AutomationGatewayClient( + host=device_obj.gateway.remote_url, + username=device_obj.gateway.secrets_group.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_REST, secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME + ), + password=device_obj.gateway.secrets_group.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_REST, secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD + ), + job=job, + verify_ssl=device_obj.gateway.verify_ssl, + ) diff --git a/nautobot_ssot/tests/itential/fixtures/gateways.py b/nautobot_ssot/tests/itential/fixtures/gateways.py index 290e2d295..3828ec2e4 100644 --- a/nautobot_ssot/tests/itential/fixtures/gateways.py +++ b/nautobot_ssot/tests/itential/fixtures/gateways.py @@ -12,7 +12,7 @@ "description": "Test IAG 1", "region": "USA", "location": "NYC", - "gateway": "iag1.example.com", + "gateway": "https://iag1.example.com:8443", "enabled": True, "username_env": "IAG1_USERNAME", "password_env": "IAG1_PASSWORD", @@ -23,7 +23,7 @@ "description": "Test IAG 10", "region": "USA", "location": "NYC", - "gateway": "iag10.example.com", + "gateway": "https://iag10.example.com:8443", "enabled": False, "username_env": "IAG1_USERNAME", "password_env": "IAG1_PASSWORD", @@ -34,7 +34,7 @@ "description": "Test IAG 2", "region": "Europe", "location": "LON", - "gateway": "iag2.example.com", + "gateway": "https://iag2.example.com:8443", "enabled": True, "username_env": "IAG2_USERNAME", "password_env": "IAG2_PASSWORD", @@ -42,7 +42,26 @@ }, ] -responses = {} +responses = { + "iag1": { + "hostname": "https://iag1.example.com:8443", + "responses": { + "login": {"token": "abc123="}, + "logout": {}, + "poll": {}, + "get_devices": {}, + "get_device": {}, + "create_device": {}, + "update_device": {}, + "delete_device": {}, + "get_groups": {}, + "get_group": {}, + "create_group": {}, + "update_group": {}, + "delete_group": {}, + }, + }, +} def update_or_create_automation_gateways( diff --git 
a/nautobot_ssot/tests/itential/fixtures/urls.py b/nautobot_ssot/tests/itential/fixtures/urls.py index 13ae1258f..c06c19391 100644 --- a/nautobot_ssot/tests/itential/fixtures/urls.py +++ b/nautobot_ssot/tests/itential/fixtures/urls.py @@ -1 +1,72 @@ """Itential SSoT URL fixtures.""" + +from nautobot_ssot.tests.itential.fixtures import gateways + + +data = [ + { + "method": "POST", + "url": f"{gateways.responses['iag1'].get('hostname')}/api/v2.0/login", + "json": gateways.responses["iag1"]["responses"].get("login"), + }, + { + "method": "POST", + "url": f"{gateways.responses['iag1'].get('hostname')}/api/v2.0/logout", + "json": gateways.responses["iag1"]["responses"].get("logout"), + }, + { + "method": "GET", + "url": f"{gateways.responses['iag1'].get('hostname')}/api/v2.0/poll", + "json": gateways.responses["iag1"]["responses"].get("poll"), + }, + { + "method": "GET", + "url": f"{gateways.responses['iag1'].get('hostname')}/api/v2.0/devices", + "json": gateways.responses["iag1"]["responses"].get("get_devices"), + }, + { + "method": "GET", + "url": f"{gateways.responses['iag1'].get('hostname')}/api/v2.0/devices/rtr1.example.net", + "json": gateways.responses["iag1"]["responses"].get("get_device"), + }, + { + "method": "POST", + "url": f"{gateways.responses['iag1'].get('hostname')}/api/v2.0/devices", + "json": gateways.responses["iag1"]["responses"].get("create_device"), + }, + { + "method": "PUT", + "url": f"{gateways.responses['iag1'].get('hostname')}/api/v2.0/devices/rtr10.example.net", + "json": gateways.responses["iag1"]["responses"].get("update_device"), + }, + { + "method": "DELETE", + "url": f"{gateways.responses['iag1'].get('hostname')}/api/v2.0/devices/rtr10.example.net", + "json": gateways.responses["iag1"]["responses"].get("delete_device"), + }, + { + "method": "GET", + "url": f"{gateways.responses['iag1'].get('hostname')}/api/v2.0/groups", + "json": gateways.responses["iag1"]["responses"].get("get_groups"), + }, + { + "method": "GET", + "url": f"{gateways.responses['iag1'].get('hostname')}/api/v2.0/groups/all", + "json": gateways.responses["iag1"]["responses"].get("get_group"), + }, + { + "method": "POST", + "url": f"{gateways.responses['iag1'].get('hostname')}/api/v2.0/groups", + "json": gateways.responses["iag1"]["responses"].get("create_group"), + }, + { + "method": "PUT", + "url": f"{gateways.responses['iag1'].get('hostname')}/api/v2.0/groups/test-group", + "json": gateways.responses["iag1"]["responses"].get("update_group"), + }, + { + "method": "DELETE", + "url": f"{gateways.responses['iag1'].get('hostname')}/api/v2.0/groups/test-group", + "json": gateways.responses["iag1"]["responses"].get("delete_group"), + }, +] diff --git a/nautobot_ssot/tests/itential/test_clients.py b/nautobot_ssot/tests/itential/test_clients.py index 4ed723165..d745d3c08 100644 --- a/nautobot_ssot/tests/itential/test_clients.py +++ b/nautobot_ssot/tests/itential/test_clients.py @@ -1,9 +1,11 @@ """Itential SSoT API Client Tests.""" import os +import requests_mock from unittest import TestCase -from nautobot_ssot.tests.itential.fixtures import gateways, logging +from nautobot_ssot.integrations.itential.models import AutomationGatewayModel +from nautobot_ssot.tests.itential.fixtures import gateways, logger, urls, clients class AutomationGatewayClientTestCase(TestCase): @@ -11,7 +13,9 @@ class AutomationGatewayClientTestCase(TestCase): def setUp(self): """Setup test cases.""" - self.job = logging.JobLogger() + self.job = logger.JobLogger() + self.requests_mock = requests_mock.Mocker() + 
self.requests_mock.start() for device in gateways.gateways: os.environ[device.get("username_env")] = "testUser" @@ -29,54 +33,77 @@ def setUp(self): secret_group=device.get("secret_group"), ) + for url_item in urls.data: + self.requests_mock.register_uri( + method=url_item.get("method"), + url=url_item.get("url"), + json=url_item.get("json"), + status_code=url_item.get("status_code", 200), + ) + + self.gateway = AutomationGatewayModel.objects.first() + self.client = clients.api_client(self.gateway) + def test_login__success(self): """Test API client login.""" - pass - - def test_logout__success(self): - """Test API client logout.""" - pass + response = self.client.login() + self.assertEquals(response, gateways.responses["iag1"]["responses"].get("login")) def test_get_devices__success(self): """Test get_devices.""" - pass + response = self.client.get_devices() + self.assertEquals(response, gateways.responses["iag1"]["responses"].get("get_devices")) def test_get_device__success(self): """Test get_device.""" - pass + response = self.client.get_device(device_name="rtr1.example.net") + self.assertEquals(response, gateways.responses["iag1"]["responses"].get("get_device")) def test_create_device__success(self): """Test create_device.""" - pass + response = self.client.create_device(device_name="rtr10.example.net", variables={}) + self.assertEquals(response, gateways.responses["iag1"]["responses"].get("create_device")) def test_update_device__success(self): """Test update_device.""" - pass + response = self.client.update_device(device_name="rtr10.example.net", variables={}) + self.assertEquals(response, gateways.responses["iag1"]["responses"].get("update_device")) def test_delete_device__success(self): """Test delete_device.""" - pass + response = self.client.delete_device(device_name="rtr10.example.net") + self.assertEquals(response, gateways.responses["iag1"]["responses"].get("delete_device")) def test_get_groups__success(self): """Test get_groups.""" - pass + response = self.client.get_groups() + self.assertEquals(response, gateways.responses["iag1"]["responses"].get("get_groups")) def test_get_group__success(self): """Test get_group.""" - pass + response = self.client.get_group(group_name="all") + self.assertEquals(response, gateways.responses["iag1"]["responses"].get("get_group")) def test_create_group__success(self): """Test create_group.""" - pass + response = self.client.create_group(group_name="test-group", variables={}) + self.assertEquals(response, gateways.responses["iag1"]["responses"].get("create_group")) def test_update_group__success(self): """Test update_group.""" - pass + response = self.client.update_group(group_name="test-group", variables={}) + self.assertEquals(response, gateways.responses["iag1"]["responses"].get("update_group")) def test_delete_group__success(self): """Test delete_group.""" - pass + response = self.client.delete_group(group_name="test-group") + self.assertEquals(response, gateways.responses["iag1"]["responses"].get("delete_group")) + + def test_logout__success(self): + """Test API client logout.""" + response = self.client.logout() + self.assertEquals(response, gateways.responses["iag1"]["responses"].get("logout")) def tearDown(self): """Teardown test cases.""" - pass + self.requests_mock.stop() From fe5fa004ec7a0a244e86dbaab9b50137821daae5 Mon Sep 17 00:00:00 2001 From: jtdub Date: Sun, 14 Apr 2024 20:00:03 -0500 Subject: [PATCH 183/229] flake8 --- nautobot_ssot/integrations/itential/clients.py | 4 ++-- nautobot_ssot/integrations/itential/navigation.py | 2 
+- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nautobot_ssot/integrations/itential/clients.py b/nautobot_ssot/integrations/itential/clients.py index 049b9fc58..ed22eae0b 100644 --- a/nautobot_ssot/integrations/itential/clients.py +++ b/nautobot_ssot/integrations/itential/clients.py @@ -144,7 +144,7 @@ def create_device( dict: API client return message. """ payload = {"name": device_name, "variables": variables} - response = self._post(uri=f"devices", json_data=payload) + response = self._post(uri="devices", json_data=payload) if response.ok: self.job.log_info(message=f"Creating {device_name} on {self.host}.") return response.json() @@ -223,7 +223,7 @@ def create_group(self, group_name: str, variables: Optional[dict]) -> Union[requ dict: API client return message. """ payload = {"name": group_name, "variables": variables} - response = self._post(uri=f"groups", json_data=payload) + response = self._post(uri="groups", json_data=payload) if response.ok: self.job.log_info(message=f"Creating {group_name} on {self.host}.") return response.json() diff --git a/nautobot_ssot/integrations/itential/navigation.py b/nautobot_ssot/integrations/itential/navigation.py index 63083e747..68bf155fa 100644 --- a/nautobot_ssot/integrations/itential/navigation.py +++ b/nautobot_ssot/integrations/itential/navigation.py @@ -1,6 +1,6 @@ """Itential SSoT Navigation.""" -from nautobot.apps.ui import NavMenuGroup, NavMenuItem, NavMenuTab +from nautobot.apps.ui import NavMenuItem nav_items = [ From 6f8073ca8e38bf8109816b38d480cc7a5af86ec2 Mon Sep 17 00:00:00 2001 From: jtdub Date: Sun, 14 Apr 2024 20:07:11 -0500 Subject: [PATCH 184/229] diffsync structure --- nautobot_ssot/integrations/itential/diffsync/__init__.py | 1 + .../integrations/itential/diffsync/adapters/__init__.py | 1 + .../integrations/itential/diffsync/adapters/itential.py | 1 + .../integrations/itential/diffsync/adapters/nautobot.py | 1 + nautobot_ssot/integrations/itential/diffsync/models/__init__.py | 1 + nautobot_ssot/integrations/itential/diffsync/models/itential.py | 1 + nautobot_ssot/integrations/itential/diffsync/models/nautobot.py | 1 + 7 files changed, 7 insertions(+) create mode 100644 nautobot_ssot/integrations/itential/diffsync/__init__.py create mode 100644 nautobot_ssot/integrations/itential/diffsync/adapters/__init__.py create mode 100644 nautobot_ssot/integrations/itential/diffsync/adapters/itential.py create mode 100644 nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py create mode 100644 nautobot_ssot/integrations/itential/diffsync/models/__init__.py create mode 100644 nautobot_ssot/integrations/itential/diffsync/models/itential.py create mode 100644 nautobot_ssot/integrations/itential/diffsync/models/nautobot.py diff --git a/nautobot_ssot/integrations/itential/diffsync/__init__.py b/nautobot_ssot/integrations/itential/diffsync/__init__.py new file mode 100644 index 000000000..1c67bb64d --- /dev/null +++ b/nautobot_ssot/integrations/itential/diffsync/__init__.py @@ -0,0 +1 @@ +"""Itential SSoT diffsync models and adapters.""" diff --git a/nautobot_ssot/integrations/itential/diffsync/adapters/__init__.py b/nautobot_ssot/integrations/itential/diffsync/adapters/__init__.py new file mode 100644 index 000000000..d983f25fb --- /dev/null +++ b/nautobot_ssot/integrations/itential/diffsync/adapters/__init__.py @@ -0,0 +1 @@ +"""Itential SSoT diffsync adapters.""" diff --git a/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py b/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py new 
file mode 100644 index 000000000..63c330e5f --- /dev/null +++ b/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py @@ -0,0 +1 @@ +"""Itential SSoT adapters.""" diff --git a/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py new file mode 100644 index 000000000..4fbfcfd9f --- /dev/null +++ b/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py @@ -0,0 +1 @@ +"""Itential SSoT Nautobot adapters.""" diff --git a/nautobot_ssot/integrations/itential/diffsync/models/__init__.py b/nautobot_ssot/integrations/itential/diffsync/models/__init__.py new file mode 100644 index 000000000..29a192ccc --- /dev/null +++ b/nautobot_ssot/integrations/itential/diffsync/models/__init__.py @@ -0,0 +1 @@ +"""Itential SSoT diffsync models.""" diff --git a/nautobot_ssot/integrations/itential/diffsync/models/itential.py b/nautobot_ssot/integrations/itential/diffsync/models/itential.py new file mode 100644 index 000000000..2311b7e6a --- /dev/null +++ b/nautobot_ssot/integrations/itential/diffsync/models/itential.py @@ -0,0 +1 @@ +"""Itential SSoT models.""" diff --git a/nautobot_ssot/integrations/itential/diffsync/models/nautobot.py b/nautobot_ssot/integrations/itential/diffsync/models/nautobot.py new file mode 100644 index 000000000..4a61f7b6c --- /dev/null +++ b/nautobot_ssot/integrations/itential/diffsync/models/nautobot.py @@ -0,0 +1 @@ +"""Itential SSoT Nautobot models.""" From 6c1ffa99ac5f71554cc3a1c37eb5736840f17e22 Mon Sep 17 00:00:00 2001 From: jtdub Date: Sun, 14 Apr 2024 22:27:39 -0500 Subject: [PATCH 185/229] update gateways fixtures --- .../tests/itential/fixtures/gateways.py | 21 +++++-------------- 1 file changed, 5 insertions(+), 16 deletions(-) diff --git a/nautobot_ssot/tests/itential/fixtures/gateways.py b/nautobot_ssot/tests/itential/fixtures/gateways.py index 3828ec2e4..78ec4e7c3 100644 --- a/nautobot_ssot/tests/itential/fixtures/gateways.py +++ b/nautobot_ssot/tests/itential/fixtures/gateways.py @@ -10,8 +10,7 @@ { "name": "IAG1", "description": "Test IAG 1", - "region": "USA", - "location": "NYC", + "region": "North America", "gateway": "https://iag1.example.com:8443", "enabled": True, "username_env": "IAG1_USERNAME", @@ -21,8 +20,7 @@ { "name": "IAG10", "description": "Test IAG 10", - "region": "USA", - "location": "NYC", + "region": "North America", "gateway": "https://iag10.example.com:8443", "enabled": False, "username_env": "IAG1_USERNAME", @@ -33,7 +31,6 @@ "name": "IAG2", "description": "Test IAG 2", "region": "Europe", - "location": "LON", "gateway": "https://iag2.example.com:8443", "enabled": True, "username_env": "IAG2_USERNAME", @@ -80,18 +77,10 @@ def update_or_create_automation_gateways( status = Status.objects.get(name="Active") # Create a region location type - region_type, _ = LocationType.objects.update_or_create(name="Region") - - # Create a site location type - site_type, _ = LocationType.objects.update_or_create(name="Site", parent=region_type) + location_type, _ = LocationType.objects.update_or_create(name="Region") # Create a region location - region, _ = Location.objects.update_or_create(name=region, location_type=region_type, status=status) - - # Create a location with the region as the parent - location, _ = Location.objects.update_or_create( - name=location, location_type=site_type, parent=region, status=status - ) + location, _ = Location.objects.update_or_create(name=region, location_type=location_type, status=status) # Create a REST username secret 
secret_username, _ = Secret.objects.update_or_create( @@ -127,5 +116,5 @@ def update_or_create_automation_gateways( # Create the Automation Gateway object automation_gateway, _ = AutomationGatewayModel.objects.update_or_create( - name=name, description=description, location=region, gateway=gateway, enabled=enabled + name=name, description=description, location=location, gateway=gateway, enabled=enabled ) From 1b670014ea19470d830b0e9a384d4f28b5d87a46 Mon Sep 17 00:00:00 2001 From: jtdub Date: Mon, 15 Apr 2024 08:52:43 -0500 Subject: [PATCH 186/229] add diffsync models and adapters --- .../itential/diffsync/adapters/itential.py | 27 +++++++ .../itential/diffsync/adapters/nautobot.py | 79 +++++++++++++++ .../itential/diffsync/models/itential.py | 32 ++++++++ .../itential/diffsync/models/nautobot.py | 14 ++++ 4 files changed, 152 insertions(+) diff --git a/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py b/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py index 63c330e5f..0cec92e1d 100644 --- a/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py +++ b/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py @@ -1 +1,28 @@ """Itential SSoT adapters.""" + +from diffsync import DiffSync +from nautobot_ssot.integrations.itential.diffsync.models.itential import ItentialAnsibleDeviceModel +from nautobot_ssot.integrations.itential.clients import AutomationGatewayClient + + +class ItentialAnsibleDeviceAdapter(DiffSync): + """Itential Ansible Device Diffsync adapter.""" + + device = ItentialAnsibleDeviceModel + top_level = ["device"] + + def __init__(self, api_client: AutomationGatewayClient, job: object, *args, **kwargs): + """Initialize Diffsync Adapter.""" + self.api_client = api_client + self.job = job + + def load(self): + """Load Adapter.""" + self.job.log_info(message=f"Loading Itential devices from {self.api_client.host} into Diffsync adapter.") + devices = self.api_client.get_devices() + + for iag_device in devices: + device_vars = iag_device.get("variables") + _device = self.device(name=iag_device.get("name"), variables=device_vars) + + self.add(_device) diff --git a/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py index 4fbfcfd9f..f2f762332 100644 --- a/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py @@ -1 +1,80 @@ """Itential SSoT Nautobot adapters.""" + +import re +import traceback + +from diffsync import DiffSync + +from nautobot_ssot.integrations.itential.diffsync.models.nautobot import NautobotAnsibleDeviceModel + +from nautobot.dcim.models import Device, Location + + +class NautobotAnsibleAdapter(DiffSync): + """Nautobot => Itential Ansible Device Diffsync Adapter.""" + + device = NautobotAnsibleDeviceModel + top_level = ["device"] + + def __init__(self, job: object, location: str, location_descendants: bool): + """Initialize Nautobot Itential Ansible Device Diffsync adapter.""" + self.job = job + self.location = location + self.location_descendants = location_descendants + + def _is_rfc1123_compliant(self, device_name: str) -> bool: + """Check to see if a device name is RFC 1123 compliant.""" + # Check for invalid characters (anything other than alphanumerics, hyphens, and periods) + if not re.search("[a-zA-Z0-9][a-zA-Z0-9-.]{0,62}$", device_name): + self.job.log_warning(message=f"{device_name} has invalid characters.") + return False + + # RFC 1123
allows hostnames to start with a digit + label_pattern = r"[a-zA-Z0-9][a-zA-Z0-9-]{0,62}$" + + # Split device_name into labels and check each one + labels = device_name.split(".") + + for label in labels: + if not re.match(label_pattern, label) or label.endswith("-"): + self.job.log_warning(message=f"{device_name} has an invalid hostname pattern.") + return False + + return True + + def _ansible_vars(self, device_obj: Device) -> dict: + """Create device variables to load into Automation Gateway.""" + if device_obj.platform and device_obj.platform.network_driver_mappings.get("ansible"): + ansible_network_os = {"ansible_network_os": device_obj.platform.network_driver_mappings.get("ansible")} + else: + ansible_network_os = {} + + ansible_host = {"ansible_host": device_obj.primary_ipv4.host} + config_context = device_obj.get_config_context() + + return {**ansible_host, **ansible_network_os, **config_context} + + def load(self): + """Load Nautobot Diffsync adapter.""" + self.job.log_info(message="Loading locations from Nautobot.") + location = Location.objects.get(name=self.location) + locations = location.get_descendants(include_self=True) if self.location_descendants else [location] + + self.job.log_info(message="Loading devices from Nautobot.") + devices = Device.objects.filter(location__in=locations).exclude(primary_ipv4=None) + + for nb_device in devices: + try: + if self._is_rfc1123_compliant(nb_device.name): + device_vars = self._ansible_vars(nb_device) + _device = self.device(name=nb_device.name, variables=device_vars) + + self.add(_device) + else: + raise Exception(f"{nb_device.name} is not RFC 1123 compliant.") + except Exception as exc: + stacktrace = traceback.format_exc() + self.job.log_warning(message=f"{nb_device.name} was not added to inventory due to an error.") + self.job.log_warning( + message="An exception occurred: " f"`{type(exc).__name__}: {exc}`\n```\n{stacktrace}\n```" + ) diff --git a/nautobot_ssot/integrations/itential/diffsync/models/itential.py b/nautobot_ssot/integrations/itential/diffsync/models/itential.py index 2311b7e6a..590f73fb8 100644 --- a/nautobot_ssot/integrations/itential/diffsync/models/itential.py +++ b/nautobot_ssot/integrations/itential/diffsync/models/itential.py @@ -1 +1,33 @@ """Itential SSoT models.""" + +from diffsync import DiffSyncModel +from typing import Optional + + +class ItentialAnsibleDeviceModel(DiffSyncModel): + """Itential Ansible Device DiffSyncModel.""" + + _modelname = "device" + _identifiers = ("name",) + _attributes = ("variables",) + + name: str + variables: Optional[dict] + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create device in Automation Gateway.""" + diffsync.api_client.create_device(device_name=ids.get("name"), variables=attrs.get("variables")) + diffsync.api_client.add_device_to_group(group_name="all", device_name=ids.get("name")) + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + + def delete(self): + """Delete device in Automation Gateway.""" + self.diffsync.api_client.delete_device_from_group(group_name="all", device_name=self.name) + self.diffsync.api_client.delete_device(device_name=self.name) + return super().delete() + + def update(self, attrs): + """Update device in Automation Gateway.""" + self.diffsync.api_client.update_device(device_name=self.name, variables=attrs.get("variables")) + return super().update(attrs) diff --git a/nautobot_ssot/integrations/itential/diffsync/models/nautobot.py b/nautobot_ssot/integrations/itential/diffsync/models/nautobot.py index 4a61f7b6c..26ce76f26
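The models/itential.py file above completes the gateway-side CRUD: each adapter's load() fills a local diffsync store, and ItentialAnsibleDeviceModel.create()/update()/delete() push the resulting changes through AutomationGatewayClient. A minimal sketch of how a sync job could wire the two adapters from this patch together follows; it is not part of the patch, the job object, client, and location name are illustrative, and it assumes both __init__ methods also call super().__init__() (which DiffSync requires to set up its internal store).

from nautobot_ssot.integrations.itential.clients import AutomationGatewayClient
from nautobot_ssot.integrations.itential.diffsync.adapters.itential import ItentialAnsibleDeviceAdapter
from nautobot_ssot.integrations.itential.diffsync.adapters.nautobot import NautobotAnsibleAdapter


def run_sync(job: object, api_client: AutomationGatewayClient) -> None:
    """Sketch: diff Nautobot against an Automation Gateway and push the delta."""
    nautobot = NautobotAnsibleAdapter(job=job, location="North America", location_descendants=True)
    itential = ItentialAnsibleDeviceAdapter(api_client=api_client, job=job)

    nautobot.load()  # Devices with a primary IPv4, keyed by RFC 1123-compliant name
    itential.load()  # the gateway's current Ansible device inventory

    diff = nautobot.diff_to(itential)
    job.log_info(message=f"Sync summary: {diff.summary()}")

    nautobot.sync_to(itential)  # fires create()/update()/delete() on ItentialAnsibleDeviceModel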
100644 --- a/nautobot_ssot/integrations/itential/diffsync/models/nautobot.py +++ b/nautobot_ssot/integrations/itential/diffsync/models/nautobot.py @@ -1 +1,15 @@ """Itential SSoT Nautobot models.""" + +from diffsync import DiffSyncModel +from typing import Optional + + +class NautobotAnsibleDeviceModel(DiffSyncModel): + """Nautobot => Itential Ansible Device DiffSyncModel.""" + + _modelname = "device" + _identifiers = ("name",) + _attributes = ("variables",) + + name: str + variables: Optional[dict] From 695f5a7a417b7efc7859dcdd4e85310eb6f66298 Mon Sep 17 00:00:00 2001 From: jtdub Date: Mon, 15 Apr 2024 08:59:41 -0500 Subject: [PATCH 187/229] resolve poetry lock conflict --- .../integrations/itential/clients.py | 7 + .../integrations/itential/constants.py | 5 + .../itential/diffsync/adapters/itential.py | 2 +- poetry.lock | 1223 +++++++++-------- pyproject.toml | 1 + 5 files changed, 638 insertions(+), 600 deletions(-) create mode 100644 nautobot_ssot/integrations/itential/constants.py diff --git a/nautobot_ssot/integrations/itential/clients.py b/nautobot_ssot/integrations/itential/clients.py index ed22eae0b..913fb5a25 100644 --- a/nautobot_ssot/integrations/itential/clients.py +++ b/nautobot_ssot/integrations/itential/clients.py @@ -2,8 +2,11 @@ import requests +from retry import retry from typing import List, Optional, Union +from nautobot_ssot.integrations.itential.constants import BACKOFF, DELAY, RETRIES + class AutomationGatewayClient: """Itential Automation Gateway API Client.""" @@ -50,11 +53,13 @@ def base_url(self): """Build base URL.""" return f"{self.host}/api/{self.api_version}" + @retry(requests.exceptions.HTTPError, delay=DELAY, tries=RETRIES, backoff=BACKOFF) def _get(self, uri: str) -> requests.Response: """Perform a GET request to the specified uri.""" response = self.session.get(f"{self.base_url}/{uri}", verify=self.verify_ssl) return response + @retry(requests.exceptions.HTTPError, delay=DELAY, tries=RETRIES, backoff=BACKOFF) def _post(self, uri: str, json_data: Optional[dict] = None) -> requests.Response: """Perform a POST request to the specified uri.""" if json_data: @@ -63,6 +68,7 @@ def _post(self, uri: str, json_data: Optional[dict] = None) -> requests.Response response = self.session.post(f"{self.base_url}/{uri}", verify=self.verify_ssl) return response + @retry(requests.exceptions.HTTPError, delay=DELAY, tries=RETRIES, backoff=BACKOFF) def _put(self, uri: str, json_data: Optional[dict] = None) -> requests.Response: """Perform a PUT request to the specified uri.""" if json_data: @@ -71,6 +77,7 @@ def _put(self, uri: str, json_data: Optional[dict] = None) -> requests.Response: response = self.session.put(f"{self.base_url}/{uri}", verify=self.verify_ssl) return response + @retry(requests.exceptions.HTTPError, delay=DELAY, tries=RETRIES, backoff=BACKOFF) def _delete(self, uri: str) -> requests.Response: """Perform a DELETE request to the specified uri.""" response = self.session.delete(f"{self.base_url}/{uri}", verify=self.verify_ssl) return response diff --git a/nautobot_ssot/integrations/itential/constants.py b/nautobot_ssot/integrations/itential/constants.py new file mode 100644 index 000000000..03da396f6 --- /dev/null +++ b/nautobot_ssot/integrations/itential/constants.py @@ -0,0 +1,5 @@ +"""Itential SSoT constants.""" + +DELAY = 1 +RETRIES = 2 +BACKOFF = 2 diff --git a/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py b/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py index 0cec92e1d..b0a5b289b 100644 ---
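The @retry decorator added to the client comes from the third-party retry package: tries caps the total number of attempts (RETRIES = 2 means one initial call plus one retry), delay is the wait in seconds before the first retry, and backoff multiplies that wait after each failure. The decorator only re-runs the call when the wrapped function raises the named exception, and requests raises HTTPError only when raise_for_status() is invoked on a response. A self-contained sketch of the pattern, with an illustrative function and URL:

import requests
from retry import retry

DELAY = 1    # seconds to wait before the first retry
RETRIES = 2  # total attempts, not additional retries
BACKOFF = 2  # multiplier applied to the delay after each failed attempt


@retry(requests.exceptions.HTTPError, delay=DELAY, tries=RETRIES, backoff=BACKOFF)
def fetch_json(url: str) -> dict:
    """Sketch: GET a URL, retrying once if the server answers 4xx/5xx."""
    response = requests.get(url, timeout=10)
    # raise_for_status() turns HTTP 4xx/5xx responses into HTTPError so the
    # decorator can see them; the _get()/_post() helpers above return the bare
    # response, so their retries trigger only if the session itself raises.
    response.raise_for_status()
    return response.json()


if __name__ == "__main__":
    print(fetch_json("https://example.com/api/health"))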
a/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py +++ b/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py @@ -19,7 +19,7 @@ def __init__(self, api_client: AutomationGatewayClient, job: object, *args, **kw def load(self): """Load Adapter.""" self.job.log_info(message=f"Loading Itential devices from {self.api_client.host} into Diffsync adapter.") - devices = self.api_client.get_devices() + devices = self.api_client.get_devices().get("data") for iag_device in devices: device_vars = iag_device.get("variables") diff --git a/poetry.lock b/poetry.lock index e0d827150..f79e8bd3d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -163,13 +163,13 @@ files = [
optional = false python-versions = ">=3.8" files = [ - {file = "bandit-1.7.8-py3-none-any.whl", hash = "sha256:509f7af645bc0cd8fd4587abc1a038fc795636671ee8204d502b933aee44f381"}, - {file = "bandit-1.7.8.tar.gz", hash = "sha256:36de50f720856ab24a24dbaa5fee2c66050ed97c1477e0a1159deab1775eab6b"}, + {file = "bandit-1.7.9-py3-none-any.whl", hash = "sha256:52077cb339000f337fb25f7e045995c4ad01511e716e5daac37014b9752de8ec"}, + {file = "bandit-1.7.9.tar.gz", hash = "sha256:7c395a436743018f7be0a4cbb0a4ea9b902b6d87264ddecf8cfdc73b4f78ff61"}, ] [package.dependencies] @@ -397,33 +397,33 @@ files = [ [[package]] name = "black" -version = "24.4.0" +version = "24.4.2" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-24.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6ad001a9ddd9b8dfd1b434d566be39b1cd502802c8d38bbb1ba612afda2ef436"}, - {file = "black-24.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3a3a092b8b756c643fe45f4624dbd5a389f770a4ac294cf4d0fce6af86addaf"}, - {file = "black-24.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dae79397f367ac8d7adb6c779813328f6d690943f64b32983e896bcccd18cbad"}, - {file = "black-24.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:71d998b73c957444fb7c52096c3843875f4b6b47a54972598741fe9a7f737fcb"}, - {file = "black-24.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8e5537f456a22cf5cfcb2707803431d2feeb82ab3748ade280d6ccd0b40ed2e8"}, - {file = "black-24.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64e60a7edd71fd542a10a9643bf369bfd2644de95ec71e86790b063aa02ff745"}, - {file = "black-24.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cd5b4f76056cecce3e69b0d4c228326d2595f506797f40b9233424e2524c070"}, - {file = "black-24.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:64578cf99b6b46a6301bc28bdb89f9d6f9b592b1c5837818a177c98525dbe397"}, - {file = "black-24.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f95cece33329dc4aa3b0e1a771c41075812e46cf3d6e3f1dfe3d91ff09826ed2"}, - {file = "black-24.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4396ca365a4310beef84d446ca5016f671b10f07abdba3e4e4304218d2c71d33"}, - {file = "black-24.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44d99dfdf37a2a00a6f7a8dcbd19edf361d056ee51093b2445de7ca09adac965"}, - {file = "black-24.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:21f9407063ec71c5580b8ad975653c66508d6a9f57bd008bb8691d273705adcd"}, - {file = "black-24.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:652e55bb722ca026299eb74e53880ee2315b181dfdd44dca98e43448620ddec1"}, - {file = "black-24.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7f2966b9b2b3b7104fca9d75b2ee856fe3fdd7ed9e47c753a4bb1a675f2caab8"}, - {file = "black-24.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bb9ca06e556a09f7f7177bc7cb604e5ed2d2df1e9119e4f7d2f1f7071c32e5d"}, - {file = "black-24.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:d4e71cdebdc8efeb6deaf5f2deb28325f8614d48426bed118ecc2dcaefb9ebf3"}, - {file = "black-24.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6644f97a7ef6f401a150cca551a1ff97e03c25d8519ee0bbc9b0058772882665"}, - {file = "black-24.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:75a2d0b4f5eb81f7eebc31f788f9830a6ce10a68c91fbe0fade34fff7a2836e6"}, - {file = "black-24.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:eb949f56a63c5e134dfdca12091e98ffb5fd446293ebae123d10fc1abad00b9e"}, - {file = "black-24.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:7852b05d02b5b9a8c893ab95863ef8986e4dda29af80bbbda94d7aee1abf8702"}, - {file = "black-24.4.0-py3-none-any.whl", hash = "sha256:74eb9b5420e26b42c00a3ff470dc0cd144b80a766128b1771d07643165e08d0e"}, - {file = "black-24.4.0.tar.gz", hash = "sha256:f07b69fda20578367eaebbd670ff8fc653ab181e1ff95d84497f9fa20e7d0641"}, + {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, + {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, + {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, + {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, + {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, + {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, + {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, + {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, + {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, + {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, + {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, + {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, + {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, + {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, + {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, + {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, + {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, + {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, + {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, + {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, + {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, + {file = "black-24.4.2.tar.gz", hash = 
"sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, ] [package.dependencies] @@ -499,13 +499,13 @@ zstd = ["zstandard (==0.22.0)"] [[package]] name = "certifi" -version = "2024.2.2" +version = "2024.6.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, + {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, ] [[package]] @@ -753,28 +753,28 @@ testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] [[package]] name = "cloudvision" -version = "1.19.1" +version = "1.20.0" description = "A Python library for Arista's CloudVision APIs and Provisioning Action integrations." optional = true python-versions = ">=3.7.0" files = [ - {file = "cloudvision-1.19.1-py3-none-any.whl", hash = "sha256:76249cd7054f515f0e20084489beecccfa83d48563b5db11a6f2493ed6e54a52"}, - {file = "cloudvision-1.19.1.tar.gz", hash = "sha256:f865cc7a5e324fbd6259454f5e2b056c381566349e797e0ddce2131b52d971e1"}, + {file = "cloudvision-1.20.0-py3-none-any.whl", hash = "sha256:ccdc8116184d808226a1cbb8c678a6e4f338d67537d469c41231e37cb31aa00f"}, + {file = "cloudvision-1.20.0.tar.gz", hash = "sha256:33760b93aa1d4267acd195432cbb5b6228e6c00d965b9127d6d7ec276c70badf"}, ] [package.dependencies] cryptography = ">=42.0.4,<43.0.0" -grpcio = ">=1.46.0" +grpcio = ">=1.53.0" msgpack = ">=1.0.3" -protobuf = ">=3.20.1,<4.0" +protobuf = ">=4.22.5,<5.0" requests = ">=2.20.1" -types-protobuf = ">=3.20.1,<4.0" +types-protobuf = ">=3.20.4.6,<4.0" types-PyYAML = ">=6.0.7" types-requests = ">=2.27.25" typing-extensions = ">=4.2.0" [package.extras] -dev = ["black (==24.3.0)", "flake8 (==3.8.4)", "grpcio-tools (==1.46.0)", "isort (==5.11.4)", "mypy (==0.950)", "mypy-protobuf (==3.2.0)", "numpy (==1.26.4)", "pytest (==7.1.2)", "pyyaml (==6.0.1)", "twine (==4.0.1)", "types-attrs (>=19.1.0)", "wheel (==0.38.4)"] +dev = ["black (==24.3.0)", "flake8 (==3.8.4)", "grpcio-tools (>=1.53.2)", "isort (==5.11.4)", "mypy (==0.950)", "mypy-protobuf (==3.2.0)", "numpy (==1.26.4)", "pytest (==7.1.2)", "pyyaml (==6.0.1)", "twine (==4.0.1)", "types-attrs (>=19.1.0)", "wheel (==0.38.4)"] [[package]] name = "colorama" @@ -874,63 +874,63 @@ test-no-images = ["pytest", "pytest-cov", "wurlitzer"] [[package]] name = "coverage" -version = "7.4.4" +version = "7.5.3" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, - {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, - {file = 
"coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, - {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, - {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, - {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, - {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, - {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, - {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, - {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, - {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, - {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, - {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, - {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, - {file = 
"coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, - {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, - {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, - {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, - {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, - {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, - {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, - {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, - {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, - {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, - {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, - {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, - {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, - 
{file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, - {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, - {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, + {file = "coverage-7.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a6519d917abb15e12380406d721e37613e2a67d166f9fb7e5a8ce0375744cd45"}, + {file = "coverage-7.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aea7da970f1feccf48be7335f8b2ca64baf9b589d79e05b9397a06696ce1a1ec"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:923b7b1c717bd0f0f92d862d1ff51d9b2b55dbbd133e05680204465f454bb286"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62bda40da1e68898186f274f832ef3e759ce929da9a9fd9fcf265956de269dbc"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8b7339180d00de83e930358223c617cc343dd08e1aa5ec7b06c3a121aec4e1d"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:25a5caf742c6195e08002d3b6c2dd6947e50efc5fc2c2205f61ecb47592d2d83"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:05ac5f60faa0c704c0f7e6a5cbfd6f02101ed05e0aee4d2822637a9e672c998d"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:239a4e75e09c2b12ea478d28815acf83334d32e722e7433471fbf641c606344c"}, + {file = "coverage-7.5.3-cp310-cp310-win32.whl", hash = "sha256:a5812840d1d00eafae6585aba38021f90a705a25b8216ec7f66aebe5b619fb84"}, + {file = "coverage-7.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:33ca90a0eb29225f195e30684ba4a6db05dbef03c2ccd50b9077714c48153cac"}, + {file = "coverage-7.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f81bc26d609bf0fbc622c7122ba6307993c83c795d2d6f6f6fd8c000a770d974"}, + {file = "coverage-7.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cec2af81f9e7569280822be68bd57e51b86d42e59ea30d10ebdbb22d2cb7232"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55f689f846661e3f26efa535071775d0483388a1ccfab899df72924805e9e7cd"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50084d3516aa263791198913a17354bd1dc627d3c1639209640b9cac3fef5807"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:341dd8f61c26337c37988345ca5c8ccabeff33093a26953a1ac72e7d0103c4fb"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ab0b028165eea880af12f66086694768f2c3139b2c31ad5e032c8edbafca6ffc"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5bc5a8c87714b0c67cfeb4c7caa82b2d71e8864d1a46aa990b5588fa953673b8"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38a3b98dae8a7c9057bd91fbf3415c05e700a5114c5f1b5b0ea5f8f429ba6614"}, + {file = "coverage-7.5.3-cp311-cp311-win32.whl", hash = "sha256:fcf7d1d6f5da887ca04302db8e0e0cf56ce9a5e05f202720e49b3e8157ddb9a9"}, + {file = "coverage-7.5.3-cp311-cp311-win_amd64.whl", hash = 
"sha256:8c836309931839cca658a78a888dab9676b5c988d0dd34ca247f5f3e679f4e7a"}, + {file = "coverage-7.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:296a7d9bbc598e8744c00f7a6cecf1da9b30ae9ad51c566291ff1314e6cbbed8"}, + {file = "coverage-7.5.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:34d6d21d8795a97b14d503dcaf74226ae51eb1f2bd41015d3ef332a24d0a17b3"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e317953bb4c074c06c798a11dbdd2cf9979dbcaa8ccc0fa4701d80042d4ebf1"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:705f3d7c2b098c40f5b81790a5fedb274113373d4d1a69e65f8b68b0cc26f6db"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1196e13c45e327d6cd0b6e471530a1882f1017eb83c6229fc613cd1a11b53cd"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:015eddc5ccd5364dcb902eaecf9515636806fa1e0d5bef5769d06d0f31b54523"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fd27d8b49e574e50caa65196d908f80e4dff64d7e592d0c59788b45aad7e8b35"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:33fc65740267222fc02975c061eb7167185fef4cc8f2770267ee8bf7d6a42f84"}, + {file = "coverage-7.5.3-cp312-cp312-win32.whl", hash = "sha256:7b2a19e13dfb5c8e145c7a6ea959485ee8e2204699903c88c7d25283584bfc08"}, + {file = "coverage-7.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:0bbddc54bbacfc09b3edaec644d4ac90c08ee8ed4844b0f86227dcda2d428fcb"}, + {file = "coverage-7.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f78300789a708ac1f17e134593f577407d52d0417305435b134805c4fb135adb"}, + {file = "coverage-7.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b368e1aee1b9b75757942d44d7598dcd22a9dbb126affcbba82d15917f0cc155"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f836c174c3a7f639bded48ec913f348c4761cbf49de4a20a956d3431a7c9cb24"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:244f509f126dc71369393ce5fea17c0592c40ee44e607b6d855e9c4ac57aac98"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4c2872b3c91f9baa836147ca33650dc5c172e9273c808c3c3199c75490e709d"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dd4b3355b01273a56b20c219e74e7549e14370b31a4ffe42706a8cda91f19f6d"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f542287b1489c7a860d43a7d8883e27ca62ab84ca53c965d11dac1d3a1fab7ce"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:75e3f4e86804023e991096b29e147e635f5e2568f77883a1e6eed74512659ab0"}, + {file = "coverage-7.5.3-cp38-cp38-win32.whl", hash = "sha256:c59d2ad092dc0551d9f79d9d44d005c945ba95832a6798f98f9216ede3d5f485"}, + {file = "coverage-7.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:fa21a04112c59ad54f69d80e376f7f9d0f5f9123ab87ecd18fbb9ec3a2beed56"}, + {file = "coverage-7.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5102a92855d518b0996eb197772f5ac2a527c0ec617124ad5242a3af5e25f85"}, + {file = "coverage-7.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1da0a2e3b37b745a2b2a678a4c796462cf753aebf94edcc87dcc6b8641eae31"}, + {file = 
"coverage-7.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8383a6c8cefba1b7cecc0149415046b6fc38836295bc4c84e820872eb5478b3d"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aad68c3f2566dfae84bf46295a79e79d904e1c21ccfc66de88cd446f8686341"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e079c9ec772fedbade9d7ebc36202a1d9ef7291bc9b3a024ca395c4d52853d7"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bde997cac85fcac227b27d4fb2c7608a2c5f6558469b0eb704c5726ae49e1c52"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:990fb20b32990b2ce2c5f974c3e738c9358b2735bc05075d50a6f36721b8f303"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3d5a67f0da401e105753d474369ab034c7bae51a4c31c77d94030d59e41df5bd"}, + {file = "coverage-7.5.3-cp39-cp39-win32.whl", hash = "sha256:e08c470c2eb01977d221fd87495b44867a56d4d594f43739a8028f8646a51e0d"}, + {file = "coverage-7.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:1d2a830ade66d3563bb61d1e3c77c8def97b30ed91e166c67d0632c018f380f0"}, + {file = "coverage-7.5.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:3538d8fb1ee9bdd2e2692b3b18c22bb1c19ffbefd06880f5ac496e42d7bb3884"}, + {file = "coverage-7.5.3.tar.gz", hash = "sha256:04aefca5190d1dc7a53a4c1a5a7f8568811306d7a8ee231c42fb69215571944f"}, ] [package.extras] @@ -952,43 +952,43 @@ dev = ["polib"] [[package]] name = "cryptography" -version = "42.0.5" +version = "42.0.8" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, - {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, - {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, - {file = 
"cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, - {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, - {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, - {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, - {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, + {file = 
"cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, + {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, + {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, + {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, + {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, + {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, + {file = 
"cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, + {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, ] [package.dependencies] @@ -1006,15 +1006,16 @@ test-randomorder = ["pytest-randomly"] [[package]] name = "cvprac" -version = "1.3.2" +version = "1.4.0" description = "Arista Cloudvision(R) Portal Rest API Client written in python" optional = true python-versions = "*" files = [ - {file = "cvprac-1.3.2.tar.gz", hash = "sha256:5f0e5249c7e73d3c6f10a698be57f50382073773e6c81d74640fcc4976b602e5"}, + {file = "cvprac-1.4.0.tar.gz", hash = "sha256:97506caa0b2d543c33011f72b9f755a6f170f4a75dc523307c294b22cd2e7236"}, ] [package.dependencies] +packaging = ">=23.2" requests = {version = ">=2.27.0", extras = ["socks"]} [package.extras] @@ -1268,13 +1269,13 @@ Django = ">=3.2" [[package]] name = "django-health-check" -version = "3.18.1" +version = "3.18.2" description = "Run checks on services like databases, queue servers, celery processes, etc." 
optional = false python-versions = ">=3.8" files = [ - {file = "django-health-check-3.18.1.tar.gz", hash = "sha256:44552d55ae8950c9548d3b90f9d9fd5570b57446a19b2a8e674c82f993cb7a2c"}, - {file = "django_health_check-3.18.1-py2.py3-none-any.whl", hash = "sha256:2c89a326cd79830e2fc6808823a9e7e874ab23f7aef3ff2c4d1194c998e1dca1"}, + {file = "django_health_check-3.18.2-py2.py3-none-any.whl", hash = "sha256:16f9c9186236cbc2858fa0d0ecc3566ba2ad2b72683e5678d0d58eb9e8bbba1a"}, + {file = "django_health_check-3.18.2.tar.gz", hash = "sha256:21235120f8d756fa75ba430d0b0dbb04620fbd7bfac92ed6a0b911915ba38918"}, ] [package.dependencies] @@ -1282,7 +1283,7 @@ django = ">=2.2" [package.extras] docs = ["sphinx"] -test = ["celery", "pytest", "pytest-cov", "pytest-django", "redis"] +test = ["boto3", "celery", "django-storages", "pytest", "pytest-cov", "pytest-django", "redis"] [[package]] name = "django-jinja" @@ -1533,13 +1534,13 @@ sidecar = ["drf-spectacular-sidecar"] [[package]] name = "drf-spectacular-sidecar" -version = "2024.4.1" +version = "2024.6.1" description = "Serve self-contained distribution builds of Swagger UI and Redoc with Django" optional = false python-versions = ">=3.6" files = [ - {file = "drf-spectacular-sidecar-2024.4.1.tar.gz", hash = "sha256:68532dd094714f79c1775c00848f22c10f004826abc856442ff30c3bc9c40bb4"}, - {file = "drf_spectacular_sidecar-2024.4.1-py3-none-any.whl", hash = "sha256:8359befe69a8953fea86be01c1ff37038854a62546225551de16c47c07dccd4e"}, + {file = "drf_spectacular_sidecar-2024.6.1-py3-none-any.whl", hash = "sha256:5ad678c788dcb36697a668884c6fdac2c511a4094cb010978bd01a6345197bbb"}, + {file = "drf_spectacular_sidecar-2024.6.1.tar.gz", hash = "sha256:eed744c26d2caff815fd67d89eca685f645479f07fb86c124d8ee26a13b1d960"}, ] [package.dependencies] @@ -1605,53 +1606,53 @@ pyflakes = ">=2.5.0,<2.6.0" [[package]] name = "fonttools" -version = "4.51.0" +version = "4.53.0" description = "Tools to manipulate font files" optional = true python-versions = ">=3.8" files = [ - {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:84d7751f4468dd8cdd03ddada18b8b0857a5beec80bce9f435742abc9a851a74"}, - {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8b4850fa2ef2cfbc1d1f689bc159ef0f45d8d83298c1425838095bf53ef46308"}, - {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5b48a1121117047d82695d276c2af2ee3a24ffe0f502ed581acc2673ecf1037"}, - {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:180194c7fe60c989bb627d7ed5011f2bef1c4d36ecf3ec64daec8302f1ae0716"}, - {file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:96a48e137c36be55e68845fc4284533bda2980f8d6f835e26bca79d7e2006438"}, - {file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:806e7912c32a657fa39d2d6eb1d3012d35f841387c8fc6cf349ed70b7c340039"}, - {file = "fonttools-4.51.0-cp310-cp310-win32.whl", hash = "sha256:32b17504696f605e9e960647c5f64b35704782a502cc26a37b800b4d69ff3c77"}, - {file = "fonttools-4.51.0-cp310-cp310-win_amd64.whl", hash = "sha256:c7e91abdfae1b5c9e3a543f48ce96013f9a08c6c9668f1e6be0beabf0a569c1b"}, - {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a8feca65bab31479d795b0d16c9a9852902e3a3c0630678efb0b2b7941ea9c74"}, - {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ac27f436e8af7779f0bb4d5425aa3535270494d3bc5459ed27de3f03151e4c2"}, - {file = 
"fonttools-4.51.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e19bd9e9964a09cd2433a4b100ca7f34e34731e0758e13ba9a1ed6e5468cc0f"}, - {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2b92381f37b39ba2fc98c3a45a9d6383bfc9916a87d66ccb6553f7bdd129097"}, - {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5f6bc991d1610f5c3bbe997b0233cbc234b8e82fa99fc0b2932dc1ca5e5afec0"}, - {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9696fe9f3f0c32e9a321d5268208a7cc9205a52f99b89479d1b035ed54c923f1"}, - {file = "fonttools-4.51.0-cp311-cp311-win32.whl", hash = "sha256:3bee3f3bd9fa1d5ee616ccfd13b27ca605c2b4270e45715bd2883e9504735034"}, - {file = "fonttools-4.51.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f08c901d3866a8905363619e3741c33f0a83a680d92a9f0e575985c2634fcc1"}, - {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4060acc2bfa2d8e98117828a238889f13b6f69d59f4f2d5857eece5277b829ba"}, - {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1250e818b5f8a679ad79660855528120a8f0288f8f30ec88b83db51515411fcc"}, - {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76f1777d8b3386479ffb4a282e74318e730014d86ce60f016908d9801af9ca2a"}, - {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b5ad456813d93b9c4b7ee55302208db2b45324315129d85275c01f5cb7e61a2"}, - {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:68b3fb7775a923be73e739f92f7e8a72725fd333eab24834041365d2278c3671"}, - {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8e2f1a4499e3b5ee82c19b5ee57f0294673125c65b0a1ff3764ea1f9db2f9ef5"}, - {file = "fonttools-4.51.0-cp312-cp312-win32.whl", hash = "sha256:278e50f6b003c6aed19bae2242b364e575bcb16304b53f2b64f6551b9c000e15"}, - {file = "fonttools-4.51.0-cp312-cp312-win_amd64.whl", hash = "sha256:b3c61423f22165541b9403ee39874dcae84cd57a9078b82e1dce8cb06b07fa2e"}, - {file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1621ee57da887c17312acc4b0e7ac30d3a4fb0fec6174b2e3754a74c26bbed1e"}, - {file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d9298be7a05bb4801f558522adbe2feea1b0b103d5294ebf24a92dd49b78e5"}, - {file = "fonttools-4.51.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee1af4be1c5afe4c96ca23badd368d8dc75f611887fb0c0dac9f71ee5d6f110e"}, - {file = "fonttools-4.51.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c18b49adc721a7d0b8dfe7c3130c89b8704baf599fb396396d07d4aa69b824a1"}, - {file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de7c29bdbdd35811f14493ffd2534b88f0ce1b9065316433b22d63ca1cd21f14"}, - {file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cadf4e12a608ef1d13e039864f484c8a968840afa0258b0b843a0556497ea9ed"}, - {file = "fonttools-4.51.0-cp38-cp38-win32.whl", hash = "sha256:aefa011207ed36cd280babfaa8510b8176f1a77261833e895a9d96e57e44802f"}, - {file = "fonttools-4.51.0-cp38-cp38-win_amd64.whl", hash = "sha256:865a58b6e60b0938874af0968cd0553bcd88e0b2cb6e588727117bd099eef836"}, - {file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:60a3409c9112aec02d5fb546f557bca6efa773dcb32ac147c6baf5f742e6258b"}, - {file = 
"fonttools-4.51.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7e89853d8bea103c8e3514b9f9dc86b5b4120afb4583b57eb10dfa5afbe0936"}, - {file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56fc244f2585d6c00b9bcc59e6593e646cf095a96fe68d62cd4da53dd1287b55"}, - {file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d145976194a5242fdd22df18a1b451481a88071feadf251221af110ca8f00ce"}, - {file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5b8cab0c137ca229433570151b5c1fc6af212680b58b15abd797dcdd9dd5051"}, - {file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:54dcf21a2f2d06ded676e3c3f9f74b2bafded3a8ff12f0983160b13e9f2fb4a7"}, - {file = "fonttools-4.51.0-cp39-cp39-win32.whl", hash = "sha256:0118ef998a0699a96c7b28457f15546815015a2710a1b23a7bf6c1be60c01636"}, - {file = "fonttools-4.51.0-cp39-cp39-win_amd64.whl", hash = "sha256:599bdb75e220241cedc6faebfafedd7670335d2e29620d207dd0378a4e9ccc5a"}, - {file = "fonttools-4.51.0-py3-none-any.whl", hash = "sha256:15c94eeef6b095831067f72c825eb0e2d48bb4cea0647c1b05c981ecba2bf39f"}, - {file = "fonttools-4.51.0.tar.gz", hash = "sha256:dc0673361331566d7a663d7ce0f6fdcbfbdc1f59c6e3ed1165ad7202ca183c68"}, + {file = "fonttools-4.53.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:52a6e0a7a0bf611c19bc8ec8f7592bdae79c8296c70eb05917fd831354699b20"}, + {file = "fonttools-4.53.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:099634631b9dd271d4a835d2b2a9e042ccc94ecdf7e2dd9f7f34f7daf333358d"}, + {file = "fonttools-4.53.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e40013572bfb843d6794a3ce076c29ef4efd15937ab833f520117f8eccc84fd6"}, + {file = "fonttools-4.53.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:715b41c3e231f7334cbe79dfc698213dcb7211520ec7a3bc2ba20c8515e8a3b5"}, + {file = "fonttools-4.53.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:74ae2441731a05b44d5988d3ac2cf784d3ee0a535dbed257cbfff4be8bb49eb9"}, + {file = "fonttools-4.53.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:95db0c6581a54b47c30860d013977b8a14febc206c8b5ff562f9fe32738a8aca"}, + {file = "fonttools-4.53.0-cp310-cp310-win32.whl", hash = "sha256:9cd7a6beec6495d1dffb1033d50a3f82dfece23e9eb3c20cd3c2444d27514068"}, + {file = "fonttools-4.53.0-cp310-cp310-win_amd64.whl", hash = "sha256:daaef7390e632283051e3cf3e16aff2b68b247e99aea916f64e578c0449c9c68"}, + {file = "fonttools-4.53.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a209d2e624ba492df4f3bfad5996d1f76f03069c6133c60cd04f9a9e715595ec"}, + {file = "fonttools-4.53.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f520d9ac5b938e6494f58a25c77564beca7d0199ecf726e1bd3d56872c59749"}, + {file = "fonttools-4.53.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eceef49f457253000e6a2d0f7bd08ff4e9fe96ec4ffce2dbcb32e34d9c1b8161"}, + {file = "fonttools-4.53.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa1f3e34373aa16045484b4d9d352d4c6b5f9f77ac77a178252ccbc851e8b2ee"}, + {file = "fonttools-4.53.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:28d072169fe8275fb1a0d35e3233f6df36a7e8474e56cb790a7258ad822b6fd6"}, + {file = "fonttools-4.53.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4a2a6ba400d386e904fd05db81f73bee0008af37799a7586deaa4aef8cd5971e"}, + {file = "fonttools-4.53.0-cp311-cp311-win32.whl", hash = 
"sha256:bb7273789f69b565d88e97e9e1da602b4ee7ba733caf35a6c2affd4334d4f005"}, + {file = "fonttools-4.53.0-cp311-cp311-win_amd64.whl", hash = "sha256:9fe9096a60113e1d755e9e6bda15ef7e03391ee0554d22829aa506cdf946f796"}, + {file = "fonttools-4.53.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d8f191a17369bd53a5557a5ee4bab91d5330ca3aefcdf17fab9a497b0e7cff7a"}, + {file = "fonttools-4.53.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:93156dd7f90ae0a1b0e8871032a07ef3178f553f0c70c386025a808f3a63b1f4"}, + {file = "fonttools-4.53.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bff98816cb144fb7b85e4b5ba3888a33b56ecef075b0e95b95bcd0a5fbf20f06"}, + {file = "fonttools-4.53.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:973d030180eca8255b1bce6ffc09ef38a05dcec0e8320cc9b7bcaa65346f341d"}, + {file = "fonttools-4.53.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c4ee5a24e281fbd8261c6ab29faa7fd9a87a12e8c0eed485b705236c65999109"}, + {file = "fonttools-4.53.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd5bc124fae781a4422f61b98d1d7faa47985f663a64770b78f13d2c072410c2"}, + {file = "fonttools-4.53.0-cp312-cp312-win32.whl", hash = "sha256:a239afa1126b6a619130909c8404070e2b473dd2b7fc4aacacd2e763f8597fea"}, + {file = "fonttools-4.53.0-cp312-cp312-win_amd64.whl", hash = "sha256:45b4afb069039f0366a43a5d454bc54eea942bfb66b3fc3e9a2c07ef4d617380"}, + {file = "fonttools-4.53.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:93bc9e5aaa06ff928d751dc6be889ff3e7d2aa393ab873bc7f6396a99f6fbb12"}, + {file = "fonttools-4.53.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2367d47816cc9783a28645bc1dac07f8ffc93e0f015e8c9fc674a5b76a6da6e4"}, + {file = "fonttools-4.53.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:907fa0b662dd8fc1d7c661b90782ce81afb510fc4b7aa6ae7304d6c094b27bce"}, + {file = "fonttools-4.53.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e0ad3c6ea4bd6a289d958a1eb922767233f00982cf0fe42b177657c86c80a8f"}, + {file = "fonttools-4.53.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:73121a9b7ff93ada888aaee3985a88495489cc027894458cb1a736660bdfb206"}, + {file = "fonttools-4.53.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ee595d7ba9bba130b2bec555a40aafa60c26ce68ed0cf509983e0f12d88674fd"}, + {file = "fonttools-4.53.0-cp38-cp38-win32.whl", hash = "sha256:fca66d9ff2ac89b03f5aa17e0b21a97c21f3491c46b583bb131eb32c7bab33af"}, + {file = "fonttools-4.53.0-cp38-cp38-win_amd64.whl", hash = "sha256:31f0e3147375002aae30696dd1dc596636abbd22fca09d2e730ecde0baad1d6b"}, + {file = "fonttools-4.53.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7d6166192dcd925c78a91d599b48960e0a46fe565391c79fe6de481ac44d20ac"}, + {file = "fonttools-4.53.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef50ec31649fbc3acf6afd261ed89d09eb909b97cc289d80476166df8438524d"}, + {file = "fonttools-4.53.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f193f060391a455920d61684a70017ef5284ccbe6023bb056e15e5ac3de11d1"}, + {file = "fonttools-4.53.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba9f09ff17f947392a855e3455a846f9855f6cf6bec33e9a427d3c1d254c712f"}, + {file = "fonttools-4.53.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0c555e039d268445172b909b1b6bdcba42ada1cf4a60e367d68702e3f87e5f64"}, + {file = "fonttools-4.53.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:5a4788036201c908079e89ae3f5399b33bf45b9ea4514913f4dbbe4fac08efe0"}, + {file = "fonttools-4.53.0-cp39-cp39-win32.whl", hash = "sha256:d1a24f51a3305362b94681120c508758a88f207fa0a681c16b5a4172e9e6c7a9"}, + {file = "fonttools-4.53.0-cp39-cp39-win_amd64.whl", hash = "sha256:1e677bfb2b4bd0e5e99e0f7283e65e47a9814b0486cb64a41adf9ef110e078f2"}, + {file = "fonttools-4.53.0-py3-none-any.whl", hash = "sha256:6b4f04b1fbc01a3569d63359f2227c89ab294550de277fd09d8fca6185669fa4"}, + {file = "fonttools-4.53.0.tar.gz", hash = "sha256:c93ed66d32de1559b6fc348838c7572d5c0ac1e4a258e76763a5caddd8944002"}, ] [package.extras] @@ -1816,13 +1817,13 @@ test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", [[package]] name = "gprof2dot" -version = "2022.7.29" +version = "2024.6.6" description = "Generate a dot graph from the output of several profilers." optional = false -python-versions = ">=2.7" +python-versions = ">=3.8" files = [ - {file = "gprof2dot-2022.7.29-py2.py3-none-any.whl", hash = "sha256:f165b3851d3c52ee4915eb1bd6cca571e5759823c2cd0f71a79bda93c2dc85d6"}, - {file = "gprof2dot-2022.7.29.tar.gz", hash = "sha256:45b4d298bd36608fccf9511c3fd88a773f7a1abc04d6cd39445b11ba43133ec5"}, + {file = "gprof2dot-2024.6.6-py2.py3-none-any.whl", hash = "sha256:45b14ad7ce64e299c8f526881007b9eb2c6b75505d5613e96e66ee4d5ab33696"}, + {file = "gprof2dot-2024.6.6.tar.gz", hash = "sha256:fa1420c60025a9eb7734f65225b4da02a10fc6dd741b37fa129bc6b41951e5ab"}, ] [[package]] @@ -1919,13 +1920,13 @@ six = ">=1.12" [[package]] name = "griffe" -version = "0.44.0" +version = "0.45.3" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." optional = false python-versions = ">=3.8" files = [ - {file = "griffe-0.44.0-py3-none-any.whl", hash = "sha256:8a4471c469ba980b87c843f1168850ce39d0c1d0c7be140dca2480f76c8e5446"}, - {file = "griffe-0.44.0.tar.gz", hash = "sha256:34aee1571042f9bf00529bc715de4516fb6f482b164e90d030300601009e0223"}, + {file = "griffe-0.45.3-py3-none-any.whl", hash = "sha256:ed1481a680ae3e28f91a06e0d8a51a5c9b97555aa2527abc2664447cc22337d6"}, + {file = "griffe-0.45.3.tar.gz", hash = "sha256:02ee71cc1a5035864b97bd0dbfff65c33f6f2c8854d3bd48a791905c2b8a44b9"}, ] [package.dependencies] @@ -1934,69 +1935,61 @@ colorama = ">=0.4" [[package]] name = "grpcio" -version = "1.62.2" +version = "1.64.1" description = "HTTP/2-based RPC framework" optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "grpcio-1.62.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:66344ea741124c38588a664237ac2fa16dfd226964cca23ddc96bd4accccbde5"}, - {file = "grpcio-1.62.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:5dab7ac2c1e7cb6179c6bfad6b63174851102cbe0682294e6b1d6f0981ad7138"}, - {file = "grpcio-1.62.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:3ad00f3f0718894749d5a8bb0fa125a7980a2f49523731a9b1fabf2b3522aa43"}, - {file = "grpcio-1.62.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e72ddfee62430ea80133d2cbe788e0d06b12f865765cb24a40009668bd8ea05"}, - {file = "grpcio-1.62.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53d3a59a10af4c2558a8e563aed9f256259d2992ae0d3037817b2155f0341de1"}, - {file = "grpcio-1.62.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1511a303f8074f67af4119275b4f954189e8313541da7b88b1b3a71425cdb10"}, - {file = 
"grpcio-1.62.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b94d41b7412ef149743fbc3178e59d95228a7064c5ab4760ae82b562bdffb199"}, - {file = "grpcio-1.62.2-cp310-cp310-win32.whl", hash = "sha256:a75af2fc7cb1fe25785be7bed1ab18cef959a376cdae7c6870184307614caa3f"}, - {file = "grpcio-1.62.2-cp310-cp310-win_amd64.whl", hash = "sha256:80407bc007754f108dc2061e37480238b0dc1952c855e86a4fc283501ee6bb5d"}, - {file = "grpcio-1.62.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:c1624aa686d4b36790ed1c2e2306cc3498778dffaf7b8dd47066cf819028c3ad"}, - {file = "grpcio-1.62.2-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:1c1bb80299bdef33309dff03932264636450c8fdb142ea39f47e06a7153d3063"}, - {file = "grpcio-1.62.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:db068bbc9b1fa16479a82e1ecf172a93874540cb84be69f0b9cb9b7ac3c82670"}, - {file = "grpcio-1.62.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2cc8a308780edbe2c4913d6a49dbdb5befacdf72d489a368566be44cadaef1a"}, - {file = "grpcio-1.62.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0695ae31a89f1a8fc8256050329a91a9995b549a88619263a594ca31b76d756"}, - {file = "grpcio-1.62.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:88b4f9ee77191dcdd8810241e89340a12cbe050be3e0d5f2f091c15571cd3930"}, - {file = "grpcio-1.62.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a0204532aa2f1afd467024b02b4069246320405bc18abec7babab03e2644e75"}, - {file = "grpcio-1.62.2-cp311-cp311-win32.whl", hash = "sha256:6e784f60e575a0de554ef9251cbc2ceb8790914fe324f11e28450047f264ee6f"}, - {file = "grpcio-1.62.2-cp311-cp311-win_amd64.whl", hash = "sha256:112eaa7865dd9e6d7c0556c8b04ae3c3a2dc35d62ad3373ab7f6a562d8199200"}, - {file = "grpcio-1.62.2-cp312-cp312-linux_armv7l.whl", hash = "sha256:65034473fc09628a02fb85f26e73885cf1ed39ebd9cf270247b38689ff5942c5"}, - {file = "grpcio-1.62.2-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d2c1771d0ee3cf72d69bb5e82c6a82f27fbd504c8c782575eddb7839729fbaad"}, - {file = "grpcio-1.62.2-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:3abe6838196da518863b5d549938ce3159d809218936851b395b09cad9b5d64a"}, - {file = "grpcio-1.62.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5ffeb269f10cedb4f33142b89a061acda9f672fd1357331dbfd043422c94e9e"}, - {file = "grpcio-1.62.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:404d3b4b6b142b99ba1cff0b2177d26b623101ea2ce51c25ef6e53d9d0d87bcc"}, - {file = "grpcio-1.62.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:262cda97efdabb20853d3b5a4c546a535347c14b64c017f628ca0cc7fa780cc6"}, - {file = "grpcio-1.62.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17708db5b11b966373e21519c4c73e5a750555f02fde82276ea2a267077c68ad"}, - {file = "grpcio-1.62.2-cp312-cp312-win32.whl", hash = "sha256:b7ec9e2f8ffc8436f6b642a10019fc513722858f295f7efc28de135d336ac189"}, - {file = "grpcio-1.62.2-cp312-cp312-win_amd64.whl", hash = "sha256:aa787b83a3cd5e482e5c79be030e2b4a122ecc6c5c6c4c42a023a2b581fdf17b"}, - {file = "grpcio-1.62.2-cp37-cp37m-linux_armv7l.whl", hash = "sha256:cfd23ad29bfa13fd4188433b0e250f84ec2c8ba66b14a9877e8bce05b524cf54"}, - {file = "grpcio-1.62.2-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:af15e9efa4d776dfcecd1d083f3ccfb04f876d613e90ef8432432efbeeac689d"}, - {file = "grpcio-1.62.2-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:f4aa94361bb5141a45ca9187464ae81a92a2a135ce2800b2203134f7a1a1d479"}, - {file = 
"grpcio-1.62.2-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82af3613a219512a28ee5c95578eb38d44dd03bca02fd918aa05603c41018051"}, - {file = "grpcio-1.62.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55ddaf53474e8caeb29eb03e3202f9d827ad3110475a21245f3c7712022882a9"}, - {file = "grpcio-1.62.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c79b518c56dddeec79e5500a53d8a4db90da995dfe1738c3ac57fe46348be049"}, - {file = "grpcio-1.62.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a5eb4844e5e60bf2c446ef38c5b40d7752c6effdee882f716eb57ae87255d20a"}, - {file = "grpcio-1.62.2-cp37-cp37m-win_amd64.whl", hash = "sha256:aaae70364a2d1fb238afd6cc9fcb10442b66e397fd559d3f0968d28cc3ac929c"}, - {file = "grpcio-1.62.2-cp38-cp38-linux_armv7l.whl", hash = "sha256:1bcfe5070e4406f489e39325b76caeadab28c32bf9252d3ae960c79935a4cc36"}, - {file = "grpcio-1.62.2-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:da6a7b6b938c15fa0f0568e482efaae9c3af31963eec2da4ff13a6d8ec2888e4"}, - {file = "grpcio-1.62.2-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:41955b641c34db7d84db8d306937b72bc4968eef1c401bea73081a8d6c3d8033"}, - {file = "grpcio-1.62.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c772f225483905f675cb36a025969eef9712f4698364ecd3a63093760deea1bc"}, - {file = "grpcio-1.62.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07ce1f775d37ca18c7a141300e5b71539690efa1f51fe17f812ca85b5e73262f"}, - {file = "grpcio-1.62.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:26f415f40f4a93579fd648f48dca1c13dfacdfd0290f4a30f9b9aeb745026811"}, - {file = "grpcio-1.62.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:db707e3685ff16fc1eccad68527d072ac8bdd2e390f6daa97bc394ea7de4acea"}, - {file = "grpcio-1.62.2-cp38-cp38-win32.whl", hash = "sha256:589ea8e75de5fd6df387de53af6c9189c5231e212b9aa306b6b0d4f07520fbb9"}, - {file = "grpcio-1.62.2-cp38-cp38-win_amd64.whl", hash = "sha256:3c3ed41f4d7a3aabf0f01ecc70d6b5d00ce1800d4af652a549de3f7cf35c4abd"}, - {file = "grpcio-1.62.2-cp39-cp39-linux_armv7l.whl", hash = "sha256:162ccf61499c893831b8437120600290a99c0bc1ce7b51f2c8d21ec87ff6af8b"}, - {file = "grpcio-1.62.2-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:f27246d7da7d7e3bd8612f63785a7b0c39a244cf14b8dd9dd2f2fab939f2d7f1"}, - {file = "grpcio-1.62.2-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:2507006c8a478f19e99b6fe36a2464696b89d40d88f34e4b709abe57e1337467"}, - {file = "grpcio-1.62.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a90ac47a8ce934e2c8d71e317d2f9e7e6aaceb2d199de940ce2c2eb611b8c0f4"}, - {file = "grpcio-1.62.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99701979bcaaa7de8d5f60476487c5df8f27483624f1f7e300ff4669ee44d1f2"}, - {file = "grpcio-1.62.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:af7dc3f7a44f10863b1b0ecab4078f0a00f561aae1edbd01fd03ad4dcf61c9e9"}, - {file = "grpcio-1.62.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fa63245271920786f4cb44dcada4983a3516be8f470924528cf658731864c14b"}, - {file = "grpcio-1.62.2-cp39-cp39-win32.whl", hash = "sha256:c6ad9c39704256ed91a1cffc1379d63f7d0278d6a0bad06b0330f5d30291e3a3"}, - {file = "grpcio-1.62.2-cp39-cp39-win_amd64.whl", hash = "sha256:16da954692fd61aa4941fbeda405a756cd96b97b5d95ca58a92547bba2c1624f"}, - {file = "grpcio-1.62.2.tar.gz", hash = "sha256:c77618071d96b7a8be2c10701a98537823b9c65ba256c0b9067e0594cdbd954d"}, + {file = "grpcio-1.64.1-cp310-cp310-linux_armv7l.whl", hash = 
"sha256:55697ecec192bc3f2f3cc13a295ab670f51de29884ca9ae6cd6247df55df2502"}, + {file = "grpcio-1.64.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:3b64ae304c175671efdaa7ec9ae2cc36996b681eb63ca39c464958396697daff"}, + {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:bac71b4b28bc9af61efcdc7630b166440bbfbaa80940c9a697271b5e1dabbc61"}, + {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c024ffc22d6dc59000faf8ad781696d81e8e38f4078cb0f2630b4a3cf231a90"}, + {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7cd5c1325f6808b8ae31657d281aadb2a51ac11ab081ae335f4f7fc44c1721d"}, + {file = "grpcio-1.64.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0a2813093ddb27418a4c99f9b1c223fab0b053157176a64cc9db0f4557b69bd9"}, + {file = "grpcio-1.64.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2981c7365a9353f9b5c864595c510c983251b1ab403e05b1ccc70a3d9541a73b"}, + {file = "grpcio-1.64.1-cp310-cp310-win32.whl", hash = "sha256:1262402af5a511c245c3ae918167eca57342c72320dffae5d9b51840c4b2f86d"}, + {file = "grpcio-1.64.1-cp310-cp310-win_amd64.whl", hash = "sha256:19264fc964576ddb065368cae953f8d0514ecc6cb3da8903766d9fb9d4554c33"}, + {file = "grpcio-1.64.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:58b1041e7c870bb30ee41d3090cbd6f0851f30ae4eb68228955d973d3efa2e61"}, + {file = "grpcio-1.64.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bbc5b1d78a7822b0a84c6f8917faa986c1a744e65d762ef6d8be9d75677af2ca"}, + {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:5841dd1f284bd1b3d8a6eca3a7f062b06f1eec09b184397e1d1d43447e89a7ae"}, + {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8caee47e970b92b3dd948371230fcceb80d3f2277b3bf7fbd7c0564e7d39068e"}, + {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73819689c169417a4f978e562d24f2def2be75739c4bed1992435d007819da1b"}, + {file = "grpcio-1.64.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6503b64c8b2dfad299749cad1b595c650c91e5b2c8a1b775380fcf8d2cbba1e9"}, + {file = "grpcio-1.64.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1de403fc1305fd96cfa75e83be3dee8538f2413a6b1685b8452301c7ba33c294"}, + {file = "grpcio-1.64.1-cp311-cp311-win32.whl", hash = "sha256:d4d29cc612e1332237877dfa7fe687157973aab1d63bd0f84cf06692f04c0367"}, + {file = "grpcio-1.64.1-cp311-cp311-win_amd64.whl", hash = "sha256:5e56462b05a6f860b72f0fa50dca06d5b26543a4e88d0396259a07dc30f4e5aa"}, + {file = "grpcio-1.64.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:4657d24c8063e6095f850b68f2d1ba3b39f2b287a38242dcabc166453e950c59"}, + {file = "grpcio-1.64.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:62b4e6eb7bf901719fce0ca83e3ed474ae5022bb3827b0a501e056458c51c0a1"}, + {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:ee73a2f5ca4ba44fa33b4d7d2c71e2c8a9e9f78d53f6507ad68e7d2ad5f64a22"}, + {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:198908f9b22e2672a998870355e226a725aeab327ac4e6ff3a1399792ece4762"}, + {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39b9d0acaa8d835a6566c640f48b50054f422d03e77e49716d4c4e8e279665a1"}, + {file = "grpcio-1.64.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5e42634a989c3aa6049f132266faf6b949ec2a6f7d302dbb5c15395b77d757eb"}, + {file = 
"grpcio-1.64.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b1a82e0b9b3022799c336e1fc0f6210adc019ae84efb7321d668129d28ee1efb"}, + {file = "grpcio-1.64.1-cp312-cp312-win32.whl", hash = "sha256:55260032b95c49bee69a423c2f5365baa9369d2f7d233e933564d8a47b893027"}, + {file = "grpcio-1.64.1-cp312-cp312-win_amd64.whl", hash = "sha256:c1a786ac592b47573a5bb7e35665c08064a5d77ab88a076eec11f8ae86b3e3f6"}, + {file = "grpcio-1.64.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:a011ac6c03cfe162ff2b727bcb530567826cec85eb8d4ad2bfb4bd023287a52d"}, + {file = "grpcio-1.64.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4d6dab6124225496010bd22690f2d9bd35c7cbb267b3f14e7a3eb05c911325d4"}, + {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:a5e771d0252e871ce194d0fdcafd13971f1aae0ddacc5f25615030d5df55c3a2"}, + {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c3c1b90ab93fed424e454e93c0ed0b9d552bdf1b0929712b094f5ecfe7a23ad"}, + {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20405cb8b13fd779135df23fabadc53b86522d0f1cba8cca0e87968587f50650"}, + {file = "grpcio-1.64.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0cc79c982ccb2feec8aad0e8fb0d168bcbca85bc77b080d0d3c5f2f15c24ea8f"}, + {file = "grpcio-1.64.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a3a035c37ce7565b8f4f35ff683a4db34d24e53dc487e47438e434eb3f701b2a"}, + {file = "grpcio-1.64.1-cp38-cp38-win32.whl", hash = "sha256:1257b76748612aca0f89beec7fa0615727fd6f2a1ad580a9638816a4b2eb18fd"}, + {file = "grpcio-1.64.1-cp38-cp38-win_amd64.whl", hash = "sha256:0a12ddb1678ebc6a84ec6b0487feac020ee2b1659cbe69b80f06dbffdb249122"}, + {file = "grpcio-1.64.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:75dbbf415026d2862192fe1b28d71f209e2fd87079d98470db90bebe57b33179"}, + {file = "grpcio-1.64.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e3d9f8d1221baa0ced7ec7322a981e28deb23749c76eeeb3d33e18b72935ab62"}, + {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:5f8b75f64d5d324c565b263c67dbe4f0af595635bbdd93bb1a88189fc62ed2e5"}, + {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c84ad903d0d94311a2b7eea608da163dace97c5fe9412ea311e72c3684925602"}, + {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:940e3ec884520155f68a3b712d045e077d61c520a195d1a5932c531f11883489"}, + {file = "grpcio-1.64.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f10193c69fc9d3d726e83bbf0f3d316f1847c3071c8c93d8090cf5f326b14309"}, + {file = "grpcio-1.64.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac15b6c2c80a4d1338b04d42a02d376a53395ddf0ec9ab157cbaf44191f3ffdd"}, + {file = "grpcio-1.64.1-cp39-cp39-win32.whl", hash = "sha256:03b43d0ccf99c557ec671c7dede64f023c7da9bb632ac65dbc57f166e4970040"}, + {file = "grpcio-1.64.1-cp39-cp39-win_amd64.whl", hash = "sha256:ed6091fa0adcc7e4ff944090cf203a52da35c37a130efa564ded02b7aff63bcd"}, + {file = "grpcio-1.64.1.tar.gz", hash = "sha256:8d51dd1c59d5fa0f34266b80a3805ec29a1f26425c2a54736133f6d87fc4968a"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.62.2)"] +protobuf = ["grpcio-tools (>=1.64.1)"] [[package]] name = "h11" @@ -2066,89 +2059,105 @@ files = [ [[package]] name = "ijson" -version = "3.2.3" +version = "3.3.0" description = "Iterative JSON parser with standard Python iterator interfaces" optional = true python-versions = "*" files = [ - {file = 
"ijson-3.2.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0a4ae076bf97b0430e4e16c9cb635a6b773904aec45ed8dcbc9b17211b8569ba"}, - {file = "ijson-3.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cfced0a6ec85916eb8c8e22415b7267ae118eaff2a860c42d2cc1261711d0d31"}, - {file = "ijson-3.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0b9d1141cfd1e6d6643aa0b4876730d0d28371815ce846d2e4e84a2d4f471cf3"}, - {file = "ijson-3.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e0a27db6454edd6013d40a956d008361aac5bff375a9c04ab11fc8c214250b5"}, - {file = "ijson-3.2.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c0d526ccb335c3c13063c273637d8611f32970603dfb182177b232d01f14c23"}, - {file = "ijson-3.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:545a30b3659df2a3481593d30d60491d1594bc8005f99600e1bba647bb44cbb5"}, - {file = "ijson-3.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9680e37a10fedb3eab24a4a7e749d8a73f26f1a4c901430e7aa81b5da15f7307"}, - {file = "ijson-3.2.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2a80c0bb1053055d1599e44dc1396f713e8b3407000e6390add72d49633ff3bb"}, - {file = "ijson-3.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f05ed49f434ce396ddcf99e9fd98245328e99f991283850c309f5e3182211a79"}, - {file = "ijson-3.2.3-cp310-cp310-win32.whl", hash = "sha256:b4eb2304573c9fdf448d3fa4a4fdcb727b93002b5c5c56c14a5ffbbc39f64ae4"}, - {file = "ijson-3.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:923131f5153c70936e8bd2dd9dcfcff43c67a3d1c789e9c96724747423c173eb"}, - {file = "ijson-3.2.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:904f77dd3d87736ff668884fe5197a184748eb0c3e302ded61706501d0327465"}, - {file = "ijson-3.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0974444c1f416e19de1e9f567a4560890095e71e81623c509feff642114c1e53"}, - {file = "ijson-3.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1a4b8eb69b6d7b4e94170aa991efad75ba156b05f0de2a6cd84f991def12ff9"}, - {file = "ijson-3.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d052417fd7ce2221114f8d3b58f05a83c1a2b6b99cafe0b86ac9ed5e2fc889df"}, - {file = "ijson-3.2.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b8064a85ec1b0beda7dd028e887f7112670d574db606f68006c72dd0bb0e0e2"}, - {file = "ijson-3.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaac293853f1342a8d2a45ac1f723c860f700860e7743fb97f7b76356df883a8"}, - {file = "ijson-3.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6c32c18a934c1dc8917455b0ce478fd7a26c50c364bd52c5a4fb0fc6bb516af7"}, - {file = "ijson-3.2.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:713a919e0220ac44dab12b5fed74f9130f3480e55e90f9d80f58de129ea24f83"}, - {file = "ijson-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a3a6a2fbbe7550ffe52d151cf76065e6b89cfb3e9d0463e49a7e322a25d0426"}, - {file = "ijson-3.2.3-cp311-cp311-win32.whl", hash = "sha256:6a4db2f7fb9acfb855c9ae1aae602e4648dd1f88804a0d5cfb78c3639bcf156c"}, - {file = "ijson-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:ccd6be56335cbb845f3d3021b1766299c056c70c4c9165fb2fbe2d62258bae3f"}, - {file = "ijson-3.2.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:eeb286639649fb6bed37997a5e30eefcacddac79476d24128348ec890b2a0ccb"}, - {file = "ijson-3.2.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:396338a655fb9af4ac59dd09c189885b51fa0eefc84d35408662031023c110d1"}, - {file = "ijson-3.2.3-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e0243d166d11a2a47c17c7e885debf3b19ed136be2af1f5d1c34212850236ac"}, - {file = "ijson-3.2.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85afdb3f3a5d0011584d4fa8e6dccc5936be51c27e84cd2882fe904ca3bd04c5"}, - {file = "ijson-3.2.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:4fc35d569eff3afa76bfecf533f818ecb9390105be257f3f83c03204661ace70"}, - {file = "ijson-3.2.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:455d7d3b7a6aacfb8ab1ebcaf697eedf5be66e044eac32508fccdc633d995f0e"}, - {file = "ijson-3.2.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:c63f3d57dbbac56cead05b12b81e8e1e259f14ce7f233a8cbe7fa0996733b628"}, - {file = "ijson-3.2.3-cp36-cp36m-win32.whl", hash = "sha256:a4d7fe3629de3ecb088bff6dfe25f77be3e8261ed53d5e244717e266f8544305"}, - {file = "ijson-3.2.3-cp36-cp36m-win_amd64.whl", hash = "sha256:96190d59f015b5a2af388a98446e411f58ecc6a93934e036daa75f75d02386a0"}, - {file = "ijson-3.2.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:35194e0b8a2bda12b4096e2e792efa5d4801a0abb950c48ade351d479cd22ba5"}, - {file = "ijson-3.2.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1053fb5f0b010ee76ca515e6af36b50d26c1728ad46be12f1f147a835341083"}, - {file = "ijson-3.2.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:211124cff9d9d139dd0dfced356f1472860352c055d2481459038b8205d7d742"}, - {file = "ijson-3.2.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92dc4d48e9f6a271292d6079e9fcdce33c83d1acf11e6e12696fb05c5889fe74"}, - {file = "ijson-3.2.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3dcc33ee56f92a77f48776014ddb47af67c33dda361e84371153c4f1ed4434e1"}, - {file = "ijson-3.2.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:98c6799925a5d1988da4cd68879b8eeab52c6e029acc45e03abb7921a4715c4b"}, - {file = "ijson-3.2.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4252e48c95cd8ceefc2caade310559ab61c37d82dfa045928ed05328eb5b5f65"}, - {file = "ijson-3.2.3-cp37-cp37m-win32.whl", hash = "sha256:644f4f03349ff2731fd515afd1c91b9e439e90c9f8c28292251834154edbffca"}, - {file = "ijson-3.2.3-cp37-cp37m-win_amd64.whl", hash = "sha256:ba33c764afa9ecef62801ba7ac0319268a7526f50f7601370d9f8f04e77fc02b"}, - {file = "ijson-3.2.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4b2ec8c2a3f1742cbd5f36b65e192028e541b5fd8c7fd97c1fc0ca6c427c704a"}, - {file = "ijson-3.2.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7dc357da4b4ebd8903e77dbcc3ce0555ee29ebe0747c3c7f56adda423df8ec89"}, - {file = "ijson-3.2.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bcc51c84bb220ac330122468fe526a7777faa6464e3b04c15b476761beea424f"}, - {file = "ijson-3.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8d54b624629f9903005c58d9321a036c72f5c212701bbb93d1a520ecd15e370"}, - {file = "ijson-3.2.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6ea7c7e3ec44742e867c72fd750c6a1e35b112f88a917615332c4476e718d40"}, - {file = "ijson-3.2.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:916acdc5e504f8b66c3e287ada5d4b39a3275fc1f2013c4b05d1ab9933671a6c"}, - {file = "ijson-3.2.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81815b4184b85ce124bfc4c446d5f5e5e643fc119771c5916f035220ada29974"}, - {file = "ijson-3.2.3-cp38-cp38-musllinux_1_1_i686.whl", hash 
= "sha256:b49fd5fe1cd9c1c8caf6c59f82b08117dd6bea2ec45b641594e25948f48f4169"}, - {file = "ijson-3.2.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:86b3c91fdcb8ffb30556c9669930f02b7642de58ca2987845b04f0d7fe46d9a8"}, - {file = "ijson-3.2.3-cp38-cp38-win32.whl", hash = "sha256:a729b0c8fb935481afe3cf7e0dadd0da3a69cc7f145dbab8502e2f1e01d85a7c"}, - {file = "ijson-3.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:d34e049992d8a46922f96483e96b32ac4c9cffd01a5c33a928e70a283710cd58"}, - {file = "ijson-3.2.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9c2a12dcdb6fa28f333bf10b3a0f80ec70bc45280d8435be7e19696fab2bc706"}, - {file = "ijson-3.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1844c5b57da21466f255a0aeddf89049e730d7f3dfc4d750f0e65c36e6a61a7c"}, - {file = "ijson-3.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2ec3e5ff2515f1c40ef6a94983158e172f004cd643b9e4b5302017139b6c96e4"}, - {file = "ijson-3.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46bafb1b9959872a1f946f8dd9c6f1a30a970fc05b7bfae8579da3f1f988e598"}, - {file = "ijson-3.2.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab4db9fee0138b60e31b3c02fff8a4c28d7b152040553b6a91b60354aebd4b02"}, - {file = "ijson-3.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4bc87e69d1997c6a55fff5ee2af878720801ff6ab1fb3b7f94adda050651e37"}, - {file = "ijson-3.2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e9fd906f0c38e9f0bfd5365e1bed98d649f506721f76bb1a9baa5d7374f26f19"}, - {file = "ijson-3.2.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e84d27d1acb60d9102728d06b9650e5b7e5cb0631bd6e3dfadba8fb6a80d6c2f"}, - {file = "ijson-3.2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2cc04fc0a22bb945cd179f614845c8b5106c0b3939ee0d84ce67c7a61ac1a936"}, - {file = "ijson-3.2.3-cp39-cp39-win32.whl", hash = "sha256:e641814793a037175f7ec1b717ebb68f26d89d82cfd66f36e588f32d7e488d5f"}, - {file = "ijson-3.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:6bd3e7e91d031f1e8cea7ce53f704ab74e61e505e8072467e092172422728b22"}, - {file = "ijson-3.2.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:06f9707da06a19b01013f8c65bf67db523662a9b4a4ff027e946e66c261f17f0"}, - {file = "ijson-3.2.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be8495f7c13fa1f622a2c6b64e79ac63965b89caf664cc4e701c335c652d15f2"}, - {file = "ijson-3.2.3-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7596b42f38c3dcf9d434dddd50f46aeb28e96f891444c2b4b1266304a19a2c09"}, - {file = "ijson-3.2.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbac4e9609a1086bbad075beb2ceec486a3b138604e12d2059a33ce2cba93051"}, - {file = "ijson-3.2.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:db2d6341f9cb538253e7fe23311d59252f124f47165221d3c06a7ed667ecd595"}, - {file = "ijson-3.2.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fa8b98be298efbb2588f883f9953113d8a0023ab39abe77fe734b71b46b1220a"}, - {file = "ijson-3.2.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:674e585361c702fad050ab4c153fd168dc30f5980ef42b64400bc84d194e662d"}, - {file = "ijson-3.2.3-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd12e42b9cb9c0166559a3ffa276b4f9fc9d5b4c304e5a13668642d34b48b634"}, - {file = "ijson-3.2.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d31e0d771d82def80cd4663a66de277c3b44ba82cd48f630526b52f74663c639"}, - 
{file = "ijson-3.2.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ce4c70c23521179d6da842bb9bc2e36bb9fad1e0187e35423ff0f282890c9ca"}, - {file = "ijson-3.2.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:39f551a6fbeed4433c85269c7c8778e2aaea2501d7ebcb65b38f556030642c17"}, - {file = "ijson-3.2.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b14d322fec0de7af16f3ef920bf282f0dd747200b69e0b9628117f381b7775b"}, - {file = "ijson-3.2.3-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7851a341429b12d4527ca507097c959659baf5106c7074d15c17c387719ffbcd"}, - {file = "ijson-3.2.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db3bf1b42191b5cc9b6441552fdcb3b583594cb6b19e90d1578b7cbcf80d0fae"}, - {file = "ijson-3.2.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:6f662dc44362a53af3084d3765bb01cd7b4734d1f484a6095cad4cb0cbfe5374"}, - {file = "ijson-3.2.3.tar.gz", hash = "sha256:10294e9bf89cb713da05bc4790bdff616610432db561964827074898e174f917"}, + {file = "ijson-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7f7a5250599c366369fbf3bc4e176f5daa28eb6bc7d6130d02462ed335361675"}, + {file = "ijson-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f87a7e52f79059f9c58f6886c262061065eb6f7554a587be7ed3aa63e6b71b34"}, + {file = "ijson-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b73b493af9e947caed75d329676b1b801d673b17481962823a3e55fe529c8b8b"}, + {file = "ijson-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5576415f3d76290b160aa093ff968f8bf6de7d681e16e463a0134106b506f49"}, + {file = "ijson-3.3.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e9ffe358d5fdd6b878a8a364e96e15ca7ca57b92a48f588378cef315a8b019e"}, + {file = "ijson-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8643c255a25824ddd0895c59f2319c019e13e949dc37162f876c41a283361527"}, + {file = "ijson-3.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:df3ab5e078cab19f7eaeef1d5f063103e1ebf8c26d059767b26a6a0ad8b250a3"}, + {file = "ijson-3.3.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3dc1fb02c6ed0bae1b4bf96971258bf88aea72051b6e4cebae97cff7090c0607"}, + {file = "ijson-3.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e9afd97339fc5a20f0542c971f90f3ca97e73d3050cdc488d540b63fae45329a"}, + {file = "ijson-3.3.0-cp310-cp310-win32.whl", hash = "sha256:844c0d1c04c40fd1b60f148dc829d3f69b2de789d0ba239c35136efe9a386529"}, + {file = "ijson-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:d654d045adafdcc6c100e8e911508a2eedbd2a1b5f93f930ba13ea67d7704ee9"}, + {file = "ijson-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:501dce8eaa537e728aa35810656aa00460a2547dcb60937c8139f36ec344d7fc"}, + {file = "ijson-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:658ba9cad0374d37b38c9893f4864f284cdcc7d32041f9808fba8c7bcaadf134"}, + {file = "ijson-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2636cb8c0f1023ef16173f4b9a233bcdb1df11c400c603d5f299fac143ca8d70"}, + {file = "ijson-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd174b90db68c3bcca273e9391934a25d76929d727dc75224bf244446b28b03b"}, + {file = "ijson-3.3.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97a9aea46e2a8371c4cf5386d881de833ed782901ac9f67ebcb63bb3b7d115af"}, + {file = "ijson-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c594c0abe69d9d6099f4ece17763d53072f65ba60b372d8ba6de8695ce6ee39e"}, + {file = "ijson-3.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8e0ff16c224d9bfe4e9e6bd0395826096cda4a3ef51e6c301e1b61007ee2bd24"}, + {file = "ijson-3.3.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0015354011303175eae7e2ef5136414e91de2298e5a2e9580ed100b728c07e51"}, + {file = "ijson-3.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:034642558afa57351a0ffe6de89e63907c4cf6849070cc10a3b2542dccda1afe"}, + {file = "ijson-3.3.0-cp311-cp311-win32.whl", hash = "sha256:192e4b65495978b0bce0c78e859d14772e841724d3269fc1667dc6d2f53cc0ea"}, + {file = "ijson-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:72e3488453754bdb45c878e31ce557ea87e1eb0f8b4fc610373da35e8074ce42"}, + {file = "ijson-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:988e959f2f3d59ebd9c2962ae71b97c0df58323910d0b368cc190ad07429d1bb"}, + {file = "ijson-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b2f73f0d0fce5300f23a1383d19b44d103bb113b57a69c36fd95b7c03099b181"}, + {file = "ijson-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0ee57a28c6bf523d7cb0513096e4eb4dac16cd935695049de7608ec110c2b751"}, + {file = "ijson-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0155a8f079c688c2ccaea05de1ad69877995c547ba3d3612c1c336edc12a3a5"}, + {file = "ijson-3.3.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ab00721304af1ae1afa4313ecfa1bf16b07f55ef91e4a5b93aeaa3e2bd7917c"}, + {file = "ijson-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40ee3821ee90be0f0e95dcf9862d786a7439bd1113e370736bfdf197e9765bfb"}, + {file = "ijson-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:da3b6987a0bc3e6d0f721b42c7a0198ef897ae50579547b0345f7f02486898f5"}, + {file = "ijson-3.3.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:63afea5f2d50d931feb20dcc50954e23cef4127606cc0ecf7a27128ed9f9a9e6"}, + {file = "ijson-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b5c3e285e0735fd8c5a26d177eca8b52512cdd8687ca86ec77a0c66e9c510182"}, + {file = "ijson-3.3.0-cp312-cp312-win32.whl", hash = "sha256:907f3a8674e489abdcb0206723e5560a5cb1fa42470dcc637942d7b10f28b695"}, + {file = "ijson-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8f890d04ad33262d0c77ead53c85f13abfb82f2c8f078dfbf24b78f59534dfdd"}, + {file = "ijson-3.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b9d85a02e77ee8ea6d9e3fd5d515bcc3d798d9c1ea54817e5feb97a9bc5d52fe"}, + {file = "ijson-3.3.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6576cdc36d5a09b0c1a3d81e13a45d41a6763188f9eaae2da2839e8a4240bce"}, + {file = "ijson-3.3.0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5589225c2da4bb732c9c370c5961c39a6db72cf69fb2a28868a5413ed7f39e6"}, + {file = "ijson-3.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad04cf38164d983e85f9cba2804566c0160b47086dcca4cf059f7e26c5ace8ca"}, + {file = "ijson-3.3.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:a3b730ef664b2ef0e99dec01b6573b9b085c766400af363833e08ebc1e38eb2f"}, + {file = "ijson-3.3.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:4690e3af7b134298055993fcbea161598d23b6d3ede11b12dca6815d82d101d5"}, + {file = "ijson-3.3.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:aaa6bfc2180c31a45fac35d40e3312a3d09954638ce0b2e9424a88e24d262a13"}, + {file = "ijson-3.3.0-cp36-cp36m-win32.whl", hash = 
"sha256:44367090a5a876809eb24943f31e470ba372aaa0d7396b92b953dda953a95d14"}, + {file = "ijson-3.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:7e2b3e9ca957153557d06c50a26abaf0d0d6c0ddf462271854c968277a6b5372"}, + {file = "ijson-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:47c144117e5c0e2babb559bc8f3f76153863b8dd90b2d550c51dab5f4b84a87f"}, + {file = "ijson-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ce02af5fbf9ba6abb70765e66930aedf73311c7d840478f1ccecac53fefbf3"}, + {file = "ijson-3.3.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ac6c3eeed25e3e2cb9b379b48196413e40ac4e2239d910bb33e4e7f6c137745"}, + {file = "ijson-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d92e339c69b585e7b1d857308ad3ca1636b899e4557897ccd91bb9e4a56c965b"}, + {file = "ijson-3.3.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:8c85447569041939111b8c7dbf6f8fa7a0eb5b2c4aebb3c3bec0fb50d7025121"}, + {file = "ijson-3.3.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:542c1e8fddf082159a5d759ee1412c73e944a9a2412077ed00b303ff796907dc"}, + {file = "ijson-3.3.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:30cfea40936afb33b57d24ceaf60d0a2e3d5c1f2335ba2623f21d560737cc730"}, + {file = "ijson-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:6b661a959226ad0d255e49b77dba1d13782f028589a42dc3172398dd3814c797"}, + {file = "ijson-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:0b003501ee0301dbf07d1597482009295e16d647bb177ce52076c2d5e64113e0"}, + {file = "ijson-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3e8d8de44effe2dbd0d8f3eb9840344b2d5b4cc284a14eb8678aec31d1b6bea8"}, + {file = "ijson-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9cd5c03c63ae06d4f876b9844c5898d0044c7940ff7460db9f4cd984ac7862b5"}, + {file = "ijson-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04366e7e4a4078d410845e58a2987fd9c45e63df70773d7b6e87ceef771b51ee"}, + {file = "ijson-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de7c1ddb80fa7a3ab045266dca169004b93f284756ad198306533b792774f10a"}, + {file = "ijson-3.3.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8851584fb931cffc0caa395f6980525fd5116eab8f73ece9d95e6f9c2c326c4c"}, + {file = "ijson-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdcfc88347fd981e53c33d832ce4d3e981a0d696b712fbcb45dcc1a43fe65c65"}, + {file = "ijson-3.3.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3917b2b3d0dbbe3296505da52b3cb0befbaf76119b2edaff30bd448af20b5400"}, + {file = "ijson-3.3.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:e10c14535abc7ddf3fd024aa36563cd8ab5d2bb6234a5d22c77c30e30fa4fb2b"}, + {file = "ijson-3.3.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3aba5c4f97f4e2ce854b5591a8b0711ca3b0c64d1b253b04ea7b004b0a197ef6"}, + {file = "ijson-3.3.0-cp38-cp38-win32.whl", hash = "sha256:b325f42e26659df1a0de66fdb5cde8dd48613da9c99c07d04e9fb9e254b7ee1c"}, + {file = "ijson-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:ff835906f84451e143f31c4ce8ad73d83ef4476b944c2a2da91aec8b649570e1"}, + {file = "ijson-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3c556f5553368dff690c11d0a1fb435d4ff1f84382d904ccc2dc53beb27ba62e"}, + {file = "ijson-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e4396b55a364a03ff7e71a34828c3ed0c506814dd1f50e16ebed3fc447d5188e"}, + {file = "ijson-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:e6850ae33529d1e43791b30575070670070d5fe007c37f5d06aebc1dd152ab3f"}, + {file = "ijson-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36aa56d68ea8def26778eb21576ae13f27b4a47263a7a2581ab2ef58b8de4451"}, + {file = "ijson-3.3.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7ec759c4a0fc820ad5dc6a58e9c391e7b16edcb618056baedbedbb9ea3b1524"}, + {file = "ijson-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b51bab2c4e545dde93cb6d6bb34bf63300b7cd06716f195dd92d9255df728331"}, + {file = "ijson-3.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:92355f95a0e4da96d4c404aa3cff2ff033f9180a9515f813255e1526551298c1"}, + {file = "ijson-3.3.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8795e88adff5aa3c248c1edce932db003d37a623b5787669ccf205c422b91e4a"}, + {file = "ijson-3.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8f83f553f4cde6d3d4eaf58ec11c939c94a0ec545c5b287461cafb184f4b3a14"}, + {file = "ijson-3.3.0-cp39-cp39-win32.whl", hash = "sha256:ead50635fb56577c07eff3e557dac39533e0fe603000684eea2af3ed1ad8f941"}, + {file = "ijson-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:c8a9befb0c0369f0cf5c1b94178d0d78f66d9cebb9265b36be6e4f66236076b8"}, + {file = "ijson-3.3.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2af323a8aec8a50fa9effa6d640691a30a9f8c4925bd5364a1ca97f1ac6b9b5c"}, + {file = "ijson-3.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f64f01795119880023ba3ce43072283a393f0b90f52b66cc0ea1a89aa64a9ccb"}, + {file = "ijson-3.3.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a716e05547a39b788deaf22725490855337fc36613288aa8ae1601dc8c525553"}, + {file = "ijson-3.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:473f5d921fadc135d1ad698e2697025045cd8ed7e5e842258295012d8a3bc702"}, + {file = "ijson-3.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd26b396bc3a1e85f4acebeadbf627fa6117b97f4c10b177d5779577c6607744"}, + {file = "ijson-3.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:25fd49031cdf5fd5f1fd21cb45259a64dad30b67e64f745cc8926af1c8c243d3"}, + {file = "ijson-3.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b72178b1e565d06ab19319965022b36ef41bcea7ea153b32ec31194bec032a2"}, + {file = "ijson-3.3.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d0b6b637d05dbdb29d0bfac2ed8425bb369e7af5271b0cc7cf8b801cb7360c2"}, + {file = "ijson-3.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5378d0baa59ae422905c5f182ea0fd74fe7e52a23e3821067a7d58c8306b2191"}, + {file = "ijson-3.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:99f5c8ab048ee4233cc4f2b461b205cbe01194f6201018174ac269bf09995749"}, + {file = "ijson-3.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:45ff05de889f3dc3d37a59d02096948ce470699f2368b32113954818b21aa74a"}, + {file = "ijson-3.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1efb521090dd6cefa7aafd120581947b29af1713c902ff54336b7c7130f04c47"}, + {file = "ijson-3.3.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87c727691858fd3a1c085d9980d12395517fcbbf02c69fbb22dede8ee03422da"}, + {file = "ijson-3.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0420c24e50389bc251b43c8ed379ab3e3ba065ac8262d98beb6735ab14844460"}, + {file = 
"ijson-3.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8fdf3721a2aa7d96577970f5604bd81f426969c1822d467f07b3d844fa2fecc7"}, + {file = "ijson-3.3.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:891f95c036df1bc95309951940f8eea8537f102fa65715cdc5aae20b8523813b"}, + {file = "ijson-3.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed1336a2a6e5c427f419da0154e775834abcbc8ddd703004108121c6dd9eba9d"}, + {file = "ijson-3.3.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0c819f83e4f7b7f7463b2dc10d626a8be0c85fbc7b3db0edc098c2b16ac968e"}, + {file = "ijson-3.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33afc25057377a6a43c892de34d229a86f89ea6c4ca3dd3db0dcd17becae0dbb"}, + {file = "ijson-3.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7914d0cf083471856e9bc2001102a20f08e82311dfc8cf1a91aa422f9414a0d6"}, + {file = "ijson-3.3.0.tar.gz", hash = "sha256:7f172e6ba1bee0d4c8f8ebd639577bfe429dee0f3f96775a067b8bae4492d8a0"}, ] [[package]] @@ -2367,13 +2376,13 @@ testing = ["Django (<3.1)", "colorama", "docopt", "pytest (>=3.9.0,<5.0.0)"] [[package]] name = "jinja2" -version = "3.1.3" +version = "3.1.4" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, - {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, ] [package.dependencies] @@ -3144,13 +3153,13 @@ testing = ["beautifulsoup4", "coverage", "docutils (>=0.17.0,<0.18.0)", "pytest [[package]] name = "nautobot" -version = "2.2.2" +version = "2.2.5" description = "Source of truth and network automation platform." 
optional = false python-versions = "<3.12,>=3.8" files = [ - {file = "nautobot-2.2.2-py3-none-any.whl", hash = "sha256:4a77b13d60f004815007d519f29602bba5b9ff899d235bb055a64ce9b6f461ce"}, - {file = "nautobot-2.2.2.tar.gz", hash = "sha256:502fd0bf8691900b1c86c307e8bb3765990890a02e656c4af8e0b9cc3b7cc6f4"}, + {file = "nautobot-2.2.5-py3-none-any.whl", hash = "sha256:8b4256cb5f76b13d56c754b8a04e2869bc78d6a6593b2e7aae8094073320cb49"}, + {file = "nautobot-2.2.5.tar.gz", hash = "sha256:0b0ac6aae922092dad271feccfef3efe1e1482284b23d0acbdb0c61f78227b57"}, ] [package.dependencies] @@ -3181,7 +3190,7 @@ emoji = ">=2.11.0,<2.12.0" GitPython = ">=3.1.43,<3.2.0" graphene-django = ">=2.16.0,<2.17.0" graphene-django-optimizer = ">=0.8.0,<0.9.0" -Jinja2 = ">=3.1.3,<3.2.0" +Jinja2 = ">=3.1.4,<3.2.0" jsonschema = ">=4.7.0,<5.0.0" Markdown = ">=3.5.2,<3.6.0" MarkupSafe = ">=2.1.5,<2.2.0" @@ -3195,16 +3204,16 @@ psycopg2-binary = ">=2.9.9,<2.10.0" python-slugify = ">=8.0.3,<8.1.0" pyuwsgi = ">=2.0.23,<2.1.0" PyYAML = ">=6.0,<6.1" -social-auth-app-django = ">=5.4.0,<5.5.0" +social-auth-app-django = ">=5.4.1,<5.5.0" svgwrite = ">=1.4.2,<1.5.0" [package.extras] -all = ["django-auth-ldap (>=4.7.0,<4.8.0)", "django-storages (>=1.14.2,<1.15.0)", "mysqlclient (>=2.2.3,<2.3.0)", "napalm (>=4.1.0,<4.2.0)", "social-auth-core[openidconnect,saml] (>=4.5.3,<4.6.0)"] +all = ["django-auth-ldap (>=4.7.0,<4.8.0)", "django-storages (>=1.14.2,<1.15.0)", "mysqlclient (>=2.2.3,<2.3.0)", "napalm (>=4.1.0,<4.2.0)", "social-auth-core[saml] (>=4.5.3,<4.6.0)"] ldap = ["django-auth-ldap (>=4.7.0,<4.8.0)"] mysql = ["mysqlclient (>=2.2.3,<2.3.0)"] napalm = ["napalm (>=4.1.0,<4.2.0)"] remote-storage = ["django-storages (>=1.14.2,<1.15.0)"] -sso = ["social-auth-core[openidconnect,saml] (>=4.5.3,<4.6.0)"] +sso = ["social-auth-core[saml] (>=4.5.3,<4.6.0)"] [[package]] name = "nautobot-capacity-metrics" @@ -3575,18 +3584,19 @@ files = [ [[package]] name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] [[package]] name = "prometheus-client" @@ -3620,13 +3630,13 @@ test = ["coveralls", "futures", "mock", "pytest (>=2.7.3)", "pytest-benchmark", [[package]] name = "prompt-toolkit" -version = "3.0.43" +version = "3.0.47" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, - {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, + {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, + {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, ] [package.dependencies] @@ -3634,33 +3644,22 @@ wcwidth = "*" [[package]] name = "protobuf" -version = "3.20.3" -description = "Protocol Buffers" +version = "4.25.3" +description = "" optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "protobuf-3.20.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99"}, - {file = "protobuf-3.20.3-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e"}, - {file = "protobuf-3.20.3-cp310-cp310-win32.whl", hash = "sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c"}, - {file = "protobuf-3.20.3-cp310-cp310-win_amd64.whl", hash = "sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7"}, - {file = "protobuf-3.20.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469"}, - {file = "protobuf-3.20.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4"}, - {file = "protobuf-3.20.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4"}, - {file = "protobuf-3.20.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454"}, - {file = "protobuf-3.20.3-cp37-cp37m-win32.whl", hash = "sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905"}, - {file = "protobuf-3.20.3-cp37-cp37m-win_amd64.whl", hash = "sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c"}, - {file = "protobuf-3.20.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7"}, - {file = "protobuf-3.20.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee"}, - {file = "protobuf-3.20.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050"}, - {file = "protobuf-3.20.3-cp38-cp38-win32.whl", hash = "sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86"}, - {file = "protobuf-3.20.3-cp38-cp38-win_amd64.whl", hash = "sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9"}, - {file = "protobuf-3.20.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b"}, - {file = "protobuf-3.20.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b"}, - {file = "protobuf-3.20.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402"}, - {file = "protobuf-3.20.3-cp39-cp39-win32.whl", hash = "sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480"}, - {file = "protobuf-3.20.3-cp39-cp39-win_amd64.whl", hash = "sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7"}, - {file = "protobuf-3.20.3-py2.py3-none-any.whl", hash = "sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db"}, - {file = "protobuf-3.20.3.tar.gz", hash = "sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2"}, + {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, + {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, + {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, + {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, + {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, + {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, + {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, + {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, + {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, ] [[package]] @@ -3696,6 +3695,7 @@ files = [ {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, @@ -3704,6 +3704,8 @@ files = [ {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, @@ -3766,6 +3768,17 @@ files = [ [package.extras] tests = ["pytest"] +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + [[package]] name = "pycares" version = "4.4.0" @@ -3869,47 +3882,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.15" +version = "1.10.16" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, - {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, - {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, - {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, - {file = 
"pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, - {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, - {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, - {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, - {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, - {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, - {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, - {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, - {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, - {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, - {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, - {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, - {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, - {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, - {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, - {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, - {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, - {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, - {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, - {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, - {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, - {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, - {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = 
"sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, - {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, - {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, - {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, - {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, - {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, - {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, - {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, - {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, - {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, + {file = "pydantic-1.10.16-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1a539ac40551b01a85e899829aa43ca8036707474af8d74b48be288d4d2d2846"}, + {file = "pydantic-1.10.16-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a4fcc7b0b8038dbda2dda642cff024032dfae24a7960cc58e57a39eb1949b9b"}, + {file = "pydantic-1.10.16-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4660dd697de1ae2d4305a85161312611f64d5360663a9ba026cd6ad9e3fe14c3"}, + {file = "pydantic-1.10.16-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:900a787c574f903a97d0bf52a43ff3b6cf4fa0119674bcfc0e5fd1056d388ad9"}, + {file = "pydantic-1.10.16-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d30192a63e6d3334c3f0c0506dd6ae9f1dce7b2f8845518915291393a5707a22"}, + {file = "pydantic-1.10.16-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:16cf23ed599ca5ca937e37ba50ab114e6b5c387eb43a6cc533701605ad1be611"}, + {file = "pydantic-1.10.16-cp310-cp310-win_amd64.whl", hash = "sha256:8d23111f41d1e19334edd51438fd57933f3eee7d9d2fa8cc3f5eda515a272055"}, + {file = "pydantic-1.10.16-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef287b8d7fc0e86a8bd1f902c61aff6ba9479c50563242fe88ba39692e98e1e0"}, + {file = "pydantic-1.10.16-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b9ded699bfd3b3912d796ff388b0c607e6d35d41053d37aaf8fd6082c660de9a"}, + {file = "pydantic-1.10.16-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:daeb199814333e4426c5e86d7fb610f4e230289f28cab90eb4de27330bef93cf"}, + {file = "pydantic-1.10.16-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5973843f1fa99ec6c3ac8d1a8698ac9340b35e45cca6c3e5beb5c3bd1ef15de6"}, + {file = "pydantic-1.10.16-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6b8a7788a8528a558828fe4a48783cafdcf2612d13c491594a8161dc721629c"}, + {file = "pydantic-1.10.16-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8abaecf54dacc9d991dda93c3b880d41092a8924cde94eeb811d7d9ab55df7d8"}, + {file = "pydantic-1.10.16-cp311-cp311-win_amd64.whl", hash 
= "sha256:ddc7b682fbd23f051edc419dc6977e11dd2dbdd0cef9d05f0e15d1387862d230"}, + {file = "pydantic-1.10.16-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:067c2b5539f7839653ad8c3d1fc2f1343338da8677b7b2172abf3cd3fdc8f719"}, + {file = "pydantic-1.10.16-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d1fc943583c046ecad0ff5d6281ee571b64e11b5503d9595febdce54f38b290"}, + {file = "pydantic-1.10.16-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18548b30ccebe71d380b0886cc44ea5d80afbcc155e3518792f13677ad06097d"}, + {file = "pydantic-1.10.16-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4e92292f9580fc5ea517618580fac24e9f6dc5657196e977c194a8e50e14f5a9"}, + {file = "pydantic-1.10.16-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5da8bc4bb4f85b8c97cc7f11141fddbbd29eb25e843672e5807e19cc3d7c1b7f"}, + {file = "pydantic-1.10.16-cp37-cp37m-win_amd64.whl", hash = "sha256:a04ee1ea34172b87707a6ecfcdb120d7656892206b7c4dbdb771a73e90179fcb"}, + {file = "pydantic-1.10.16-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4fa86469fd46e732242c7acb83282d33f83591a7e06f840481327d5bf6d96112"}, + {file = "pydantic-1.10.16-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:89c2783dc261726fe7a5ce1121bce29a2f7eb9b1e704c68df2b117604e3b346f"}, + {file = "pydantic-1.10.16-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78e59fa919fa7a192f423d190d8660c35dd444efa9216662273f36826765424b"}, + {file = "pydantic-1.10.16-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7e82a80068c77f4b074032e031e642530b6d45cb8121fc7c99faa31fb6c6b72"}, + {file = "pydantic-1.10.16-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d82d5956cee27a30e26a5b88d00a6a2a15a4855e13c9baf50175976de0dc282c"}, + {file = "pydantic-1.10.16-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b7b99424cc0970ff08deccb549b5a6ec1040c0b449eab91723e64df2bd8fdca"}, + {file = "pydantic-1.10.16-cp38-cp38-win_amd64.whl", hash = "sha256:d97a35e1ba59442775201657171f601a2879e63517a55862a51f8d67cdfc0017"}, + {file = "pydantic-1.10.16-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9d91f6866fd3e303c632207813ef6bc4d86055e21c5e5a0a311983a9ac5f0192"}, + {file = "pydantic-1.10.16-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d8d3c71d14c8bd26d2350c081908dbf59d5a6a8f9596d9ef2b09cc1e61c8662b"}, + {file = "pydantic-1.10.16-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b73e6386b439b4881d79244e9fc1e32d1e31e8d784673f5d58a000550c94a6c0"}, + {file = "pydantic-1.10.16-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5f039881fb2ef86f6de6eacce6e71701b47500355738367413ccc1550b2a69cf"}, + {file = "pydantic-1.10.16-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:3895ddb26f22bdddee7e49741486aa7b389258c6f6771943e87fc00eabd79134"}, + {file = "pydantic-1.10.16-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:55b945da2756b5cef93d792521ad0d457fdf2f69fd5a2d10a27513f5281717dd"}, + {file = "pydantic-1.10.16-cp39-cp39-win_amd64.whl", hash = "sha256:22dd265c77c3976a34be78409b128cb84629284dfd1b69d2fa1507a36f84dc8b"}, + {file = "pydantic-1.10.16-py3-none-any.whl", hash = "sha256:aa2774ba5412fd1c5cb890d08e8b0a3bb5765898913ba1f61a65a4810f03cf29"}, + {file = "pydantic-1.10.16.tar.gz", hash = "sha256:8bb388f6244809af69ee384900b10b677a69f1980fdc655ea419710cffcb5610"}, ] [package.dependencies] @@ -3932,17 +3945,16 @@ files = [ [[package]] name = 
"pygments" -version = "2.17.2" +version = "2.18.0" description = "Pygments is a syntax highlighting package written in Python." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, - {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, ] [package.extras] -plugins = ["importlib-metadata"] windows-terminal = ["colorama (>=0.4.6)"] [[package]] @@ -3964,17 +3976,17 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pylint" -version = "3.1.0" +version = "3.2.3" description = "python code static checker" optional = false python-versions = ">=3.8.0" files = [ - {file = "pylint-3.1.0-py3-none-any.whl", hash = "sha256:507a5b60953874766d8a366e8e8c7af63e058b26345cfcb5f91f89d987fd6b74"}, - {file = "pylint-3.1.0.tar.gz", hash = "sha256:6a69beb4a6f63debebaab0a3477ecd0f559aa726af4954fc948c51f7a2549e23"}, + {file = "pylint-3.2.3-py3-none-any.whl", hash = "sha256:b3d7d2708a3e04b4679e02d99e72329a8b7ee8afb8d04110682278781f889fa8"}, + {file = "pylint-3.2.3.tar.gz", hash = "sha256:02f6c562b215582386068d52a30f520d84fdbcf2a95fc7e855b816060d048b60"}, ] [package.dependencies] -astroid = ">=3.1.0,<=3.2.0-dev0" +astroid = ">=3.2.2,<=3.3.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, @@ -4127,13 +4139,12 @@ files = [ [[package]] name = "python-crontab" -version = "3.0.0" +version = "3.1.0" description = "Python Crontab API" optional = false python-versions = "*" files = [ - {file = "python-crontab-3.0.0.tar.gz", hash = "sha256:79fb7465039ddfd4fb93d072d6ee0d45c1ac8bf1597f0686ea14fd4361dba379"}, - {file = "python_crontab-3.0.0-py3-none-any.whl", hash = "sha256:6d5ba3c190ec76e4d252989a1644fcb233dbf53fbc8fceeb9febe1657b9fb1d4"}, + {file = "python-crontab-3.1.0.tar.gz", hash = "sha256:f4ea1605d24533b67fa7a634ef26cb59a5f2e7954f6e677d2d7a2229959a2fc8"}, ] [package.dependencies] @@ -4292,6 +4303,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -4299,8 +4311,16 @@ files = [ {file = 
"PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -4317,6 +4337,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -4324,6 +4345,7 @@ files = [ {file = 
"PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -4345,13 +4367,13 @@ pyyaml = "*" [[package]] name = "redis" -version = "5.0.3" +version = "5.0.6" description = "Python client for Redis database and key-value store" optional = false python-versions = ">=3.7" files = [ - {file = "redis-5.0.3-py3-none-any.whl", hash = "sha256:5da9b8fe9e1254293756c16c008e8620b3d15fcc6dde6babde9541850e72a32d"}, - {file = "redis-5.0.3.tar.gz", hash = "sha256:4973bae7444c0fbed64a06b87446f79361cb7e4ec1538c022d696ed7a5015580"}, + {file = "redis-5.0.6-py3-none-any.whl", hash = "sha256:c0d6d990850c627bbf7be01c5c4cbaadf67b48593e913bb71c9819c30df37eee"}, + {file = "redis-5.0.6.tar.gz", hash = "sha256:38473cd7c6389ad3e44a91f4c3eaf6bcb8a9f746007f29bf4fb20824ff0b2197"}, ] [package.dependencies] @@ -4363,115 +4385,101 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)" [[package]] name = "regex" -version = "2024.4.16" +version = "2024.5.15" description = "Alternative regular expression module, to replace re." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "regex-2024.4.16-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb83cc090eac63c006871fd24db5e30a1f282faa46328572661c0a24a2323a08"}, - {file = "regex-2024.4.16-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8c91e1763696c0eb66340c4df98623c2d4e77d0746b8f8f2bee2c6883fd1fe18"}, - {file = "regex-2024.4.16-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:10188fe732dec829c7acca7422cdd1bf57d853c7199d5a9e96bb4d40db239c73"}, - {file = "regex-2024.4.16-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:956b58d692f235cfbf5b4f3abd6d99bf102f161ccfe20d2fd0904f51c72c4c66"}, - {file = "regex-2024.4.16-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a70b51f55fd954d1f194271695821dd62054d949efd6368d8be64edd37f55c86"}, - {file = "regex-2024.4.16-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c02fcd2bf45162280613d2e4a1ca3ac558ff921ae4e308ecb307650d3a6ee51"}, - {file = "regex-2024.4.16-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ed75ea6892a56896d78f11006161eea52c45a14994794bcfa1654430984b22"}, - {file = "regex-2024.4.16-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd727ad276bb91928879f3aa6396c9a1d34e5e180dce40578421a691eeb77f47"}, - {file = "regex-2024.4.16-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7cbc5d9e8a1781e7be17da67b92580d6ce4dcef5819c1b1b89f49d9678cc278c"}, - {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:78fddb22b9ef810b63ef341c9fcf6455232d97cfe03938cbc29e2672c436670e"}, - {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:445ca8d3c5a01309633a0c9db57150312a181146315693273e35d936472df912"}, - {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:95399831a206211d6bc40224af1c635cb8790ddd5c7493e0bd03b85711076a53"}, - {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:7731728b6568fc286d86745f27f07266de49603a6fdc4d19c87e8c247be452af"}, - {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4facc913e10bdba42ec0aee76d029aedda628161a7ce4116b16680a0413f658a"}, - {file = "regex-2024.4.16-cp310-cp310-win32.whl", hash = "sha256:911742856ce98d879acbea33fcc03c1d8dc1106234c5e7d068932c945db209c0"}, - {file = "regex-2024.4.16-cp310-cp310-win_amd64.whl", hash = "sha256:e0a2df336d1135a0b3a67f3bbf78a75f69562c1199ed9935372b82215cddd6e2"}, - {file = "regex-2024.4.16-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1210365faba7c2150451eb78ec5687871c796b0f1fa701bfd2a4a25420482d26"}, - {file = "regex-2024.4.16-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9ab40412f8cd6f615bfedea40c8bf0407d41bf83b96f6fc9ff34976d6b7037fd"}, - {file = "regex-2024.4.16-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fd80d1280d473500d8086d104962a82d77bfbf2b118053824b7be28cd5a79ea5"}, - {file = "regex-2024.4.16-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bb966fdd9217e53abf824f437a5a2d643a38d4fd5fd0ca711b9da683d452969"}, - {file = "regex-2024.4.16-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:20b7a68444f536365af42a75ccecb7ab41a896a04acf58432db9e206f4e525d6"}, - {file = "regex-2024.4.16-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:b74586dd0b039c62416034f811d7ee62810174bb70dffcca6439f5236249eb09"}, - {file = "regex-2024.4.16-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c8290b44d8b0af4e77048646c10c6e3aa583c1ca67f3b5ffb6e06cf0c6f0f89"}, - {file = "regex-2024.4.16-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2d80a6749724b37853ece57988b39c4e79d2b5fe2869a86e8aeae3bbeef9eb0"}, - {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3a1018e97aeb24e4f939afcd88211ace472ba566efc5bdf53fd8fd7f41fa7170"}, - {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8d015604ee6204e76569d2f44e5a210728fa917115bef0d102f4107e622b08d5"}, - {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:3d5ac5234fb5053850d79dd8eb1015cb0d7d9ed951fa37aa9e6249a19aa4f336"}, - {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:0a38d151e2cdd66d16dab550c22f9521ba79761423b87c01dae0a6e9add79c0d"}, - {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:159dc4e59a159cb8e4e8f8961eb1fa5d58f93cb1acd1701d8aff38d45e1a84a6"}, - {file = "regex-2024.4.16-cp311-cp311-win32.whl", hash = "sha256:ba2336d6548dee3117520545cfe44dc28a250aa091f8281d28804aa8d707d93d"}, - {file = "regex-2024.4.16-cp311-cp311-win_amd64.whl", hash = "sha256:8f83b6fd3dc3ba94d2b22717f9c8b8512354fd95221ac661784df2769ea9bba9"}, - {file = "regex-2024.4.16-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:80b696e8972b81edf0af2a259e1b2a4a661f818fae22e5fa4fa1a995fb4a40fd"}, - {file = "regex-2024.4.16-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d61ae114d2a2311f61d90c2ef1358518e8f05eafda76eaf9c772a077e0b465ec"}, - {file = "regex-2024.4.16-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8ba6745440b9a27336443b0c285d705ce73adb9ec90e2f2004c64d95ab5a7598"}, - {file = "regex-2024.4.16-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6295004b2dd37b0835ea5c14a33e00e8cfa3c4add4d587b77287825f3418d310"}, - {file = "regex-2024.4.16-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4aba818dcc7263852aabb172ec27b71d2abca02a593b95fa79351b2774eb1d2b"}, - {file = "regex-2024.4.16-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0800631e565c47520aaa04ae38b96abc5196fe8b4aa9bd864445bd2b5848a7a"}, - {file = "regex-2024.4.16-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08dea89f859c3df48a440dbdcd7b7155bc675f2fa2ec8c521d02dc69e877db70"}, - {file = "regex-2024.4.16-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eeaa0b5328b785abc344acc6241cffde50dc394a0644a968add75fcefe15b9d4"}, - {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4e819a806420bc010489f4e741b3036071aba209f2e0989d4750b08b12a9343f"}, - {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:c2d0e7cbb6341e830adcbfa2479fdeebbfbb328f11edd6b5675674e7a1e37730"}, - {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:91797b98f5e34b6a49f54be33f72e2fb658018ae532be2f79f7c63b4ae225145"}, - {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:d2da13568eff02b30fd54fccd1e042a70fe920d816616fda4bf54ec705668d81"}, - {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:370c68dc5570b394cbaadff50e64d705f64debed30573e5c313c360689b6aadc"}, - {file = 
"regex-2024.4.16-cp312-cp312-win32.whl", hash = "sha256:904c883cf10a975b02ab3478bce652f0f5346a2c28d0a8521d97bb23c323cc8b"}, - {file = "regex-2024.4.16-cp312-cp312-win_amd64.whl", hash = "sha256:785c071c982dce54d44ea0b79cd6dfafddeccdd98cfa5f7b86ef69b381b457d9"}, - {file = "regex-2024.4.16-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e2f142b45c6fed48166faeb4303b4b58c9fcd827da63f4cf0a123c3480ae11fb"}, - {file = "regex-2024.4.16-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e87ab229332ceb127a165612d839ab87795972102cb9830e5f12b8c9a5c1b508"}, - {file = "regex-2024.4.16-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:81500ed5af2090b4a9157a59dbc89873a25c33db1bb9a8cf123837dcc9765047"}, - {file = "regex-2024.4.16-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b340cccad138ecb363324aa26893963dcabb02bb25e440ebdf42e30963f1a4e0"}, - {file = "regex-2024.4.16-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c72608e70f053643437bd2be0608f7f1c46d4022e4104d76826f0839199347a"}, - {file = "regex-2024.4.16-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a01fe2305e6232ef3e8f40bfc0f0f3a04def9aab514910fa4203bafbc0bb4682"}, - {file = "regex-2024.4.16-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:03576e3a423d19dda13e55598f0fd507b5d660d42c51b02df4e0d97824fdcae3"}, - {file = "regex-2024.4.16-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:549c3584993772e25f02d0656ac48abdda73169fe347263948cf2b1cead622f3"}, - {file = "regex-2024.4.16-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:34422d5a69a60b7e9a07a690094e824b66f5ddc662a5fc600d65b7c174a05f04"}, - {file = "regex-2024.4.16-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:5f580c651a72b75c39e311343fe6875d6f58cf51c471a97f15a938d9fe4e0d37"}, - {file = "regex-2024.4.16-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3399dd8a7495bbb2bacd59b84840eef9057826c664472e86c91d675d007137f5"}, - {file = "regex-2024.4.16-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8d1f86f3f4e2388aa3310b50694ac44daefbd1681def26b4519bd050a398dc5a"}, - {file = "regex-2024.4.16-cp37-cp37m-win32.whl", hash = "sha256:dd5acc0a7d38fdc7a3a6fd3ad14c880819008ecb3379626e56b163165162cc46"}, - {file = "regex-2024.4.16-cp37-cp37m-win_amd64.whl", hash = "sha256:ba8122e3bb94ecda29a8de4cf889f600171424ea586847aa92c334772d200331"}, - {file = "regex-2024.4.16-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:743deffdf3b3481da32e8a96887e2aa945ec6685af1cfe2bcc292638c9ba2f48"}, - {file = "regex-2024.4.16-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7571f19f4a3fd00af9341c7801d1ad1967fc9c3f5e62402683047e7166b9f2b4"}, - {file = "regex-2024.4.16-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:df79012ebf6f4efb8d307b1328226aef24ca446b3ff8d0e30202d7ebcb977a8c"}, - {file = "regex-2024.4.16-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e757d475953269fbf4b441207bb7dbdd1c43180711b6208e129b637792ac0b93"}, - {file = "regex-2024.4.16-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4313ab9bf6a81206c8ac28fdfcddc0435299dc88cad12cc6305fd0e78b81f9e4"}, - {file = "regex-2024.4.16-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d83c2bc678453646f1a18f8db1e927a2d3f4935031b9ad8a76e56760461105dd"}, - {file = "regex-2024.4.16-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9df1bfef97db938469ef0a7354b2d591a2d438bc497b2c489471bec0e6baf7c4"}, - {file = "regex-2024.4.16-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62120ed0de69b3649cc68e2965376048793f466c5a6c4370fb27c16c1beac22d"}, - {file = "regex-2024.4.16-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c2ef6f7990b6e8758fe48ad08f7e2f66c8f11dc66e24093304b87cae9037bb4a"}, - {file = "regex-2024.4.16-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8fc6976a3395fe4d1fbeb984adaa8ec652a1e12f36b56ec8c236e5117b585427"}, - {file = "regex-2024.4.16-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:03e68f44340528111067cecf12721c3df4811c67268b897fbe695c95f860ac42"}, - {file = "regex-2024.4.16-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ec7e0043b91115f427998febaa2beb82c82df708168b35ece3accb610b91fac1"}, - {file = "regex-2024.4.16-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c21fc21a4c7480479d12fd8e679b699f744f76bb05f53a1d14182b31f55aac76"}, - {file = "regex-2024.4.16-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:12f6a3f2f58bb7344751919a1876ee1b976fe08b9ffccb4bbea66f26af6017b9"}, - {file = "regex-2024.4.16-cp38-cp38-win32.whl", hash = "sha256:479595a4fbe9ed8f8f72c59717e8cf222da2e4c07b6ae5b65411e6302af9708e"}, - {file = "regex-2024.4.16-cp38-cp38-win_amd64.whl", hash = "sha256:0534b034fba6101611968fae8e856c1698da97ce2efb5c2b895fc8b9e23a5834"}, - {file = "regex-2024.4.16-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a7ccdd1c4a3472a7533b0a7aa9ee34c9a2bef859ba86deec07aff2ad7e0c3b94"}, - {file = "regex-2024.4.16-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f2f017c5be19984fbbf55f8af6caba25e62c71293213f044da3ada7091a4455"}, - {file = "regex-2024.4.16-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:803b8905b52de78b173d3c1e83df0efb929621e7b7c5766c0843704d5332682f"}, - {file = "regex-2024.4.16-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:684008ec44ad275832a5a152f6e764bbe1914bea10968017b6feaecdad5736e0"}, - {file = "regex-2024.4.16-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65436dce9fdc0aeeb0a0effe0839cb3d6a05f45aa45a4d9f9c60989beca78b9c"}, - {file = "regex-2024.4.16-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea355eb43b11764cf799dda62c658c4d2fdb16af41f59bb1ccfec517b60bcb07"}, - {file = "regex-2024.4.16-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98c1165f3809ce7774f05cb74e5408cd3aa93ee8573ae959a97a53db3ca3180d"}, - {file = "regex-2024.4.16-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cccc79a9be9b64c881f18305a7c715ba199e471a3973faeb7ba84172abb3f317"}, - {file = "regex-2024.4.16-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00169caa125f35d1bca6045d65a662af0202704489fada95346cfa092ec23f39"}, - {file = "regex-2024.4.16-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6cc38067209354e16c5609b66285af17a2863a47585bcf75285cab33d4c3b8df"}, - {file = "regex-2024.4.16-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:23cff1b267038501b179ccbbd74a821ac4a7192a1852d1d558e562b507d46013"}, - {file = "regex-2024.4.16-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:b9d320b3bf82a39f248769fc7f188e00f93526cc0fe739cfa197868633d44701"}, - {file = "regex-2024.4.16-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:89ec7f2c08937421bbbb8b48c54096fa4f88347946d4747021ad85f1b3021b3c"}, - {file = "regex-2024.4.16-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4918fd5f8b43aa7ec031e0fef1ee02deb80b6afd49c85f0790be1dc4ce34cb50"}, - {file = "regex-2024.4.16-cp39-cp39-win32.whl", hash = "sha256:684e52023aec43bdf0250e843e1fdd6febbe831bd9d52da72333fa201aaa2335"}, - {file = "regex-2024.4.16-cp39-cp39-win_amd64.whl", hash = "sha256:e697e1c0238133589e00c244a8b676bc2cfc3ab4961318d902040d099fec7483"}, - {file = "regex-2024.4.16.tar.gz", hash = "sha256:fa454d26f2e87ad661c4f0c5a5fe4cf6aab1e307d1b94f16ffdfcb089ba685c0"}, + {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a81e3cfbae20378d75185171587cbf756015ccb14840702944f014e0d93ea09f"}, + {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7b59138b219ffa8979013be7bc85bb60c6f7b7575df3d56dc1e403a438c7a3f6"}, + {file = "regex-2024.5.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0bd000c6e266927cb7a1bc39d55be95c4b4f65c5be53e659537537e019232b1"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eaa7ddaf517aa095fa8da0b5015c44d03da83f5bd49c87961e3c997daed0de7"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba68168daedb2c0bab7fd7e00ced5ba90aebf91024dea3c88ad5063c2a562cca"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e8d717bca3a6e2064fc3a08df5cbe366369f4b052dcd21b7416e6d71620dca1"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1337b7dbef9b2f71121cdbf1e97e40de33ff114801263b275aafd75303bd62b5"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9ebd0a36102fcad2f03696e8af4ae682793a5d30b46c647eaf280d6cfb32796"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9efa1a32ad3a3ea112224897cdaeb6aa00381627f567179c0314f7b65d354c62"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1595f2d10dff3d805e054ebdc41c124753631b6a471b976963c7b28543cf13b0"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b802512f3e1f480f41ab5f2cfc0e2f761f08a1f41092d6718868082fc0d27143"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a0981022dccabca811e8171f913de05720590c915b033b7e601f35ce4ea7019f"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:19068a6a79cf99a19ccefa44610491e9ca02c2be3305c7760d3831d38a467a6f"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1b5269484f6126eee5e687785e83c6b60aad7663dafe842b34691157e5083e53"}, + {file = "regex-2024.5.15-cp310-cp310-win32.whl", hash = "sha256:ada150c5adfa8fbcbf321c30c751dc67d2f12f15bd183ffe4ec7cde351d945b3"}, + {file = "regex-2024.5.15-cp310-cp310-win_amd64.whl", hash = "sha256:ac394ff680fc46b97487941f5e6ae49a9f30ea41c6c6804832063f14b2a5a145"}, + {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f5b1dff3ad008dccf18e652283f5e5339d70bf8ba7c98bf848ac33db10f7bc7a"}, + {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c6a2b494a76983df8e3d3feea9b9ffdd558b247e60b92f877f93a1ff43d26656"}, + {file = "regex-2024.5.15-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:a32b96f15c8ab2e7d27655969a23895eb799de3665fa94349f3b2fbfd547236f"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10002e86e6068d9e1c91eae8295ef690f02f913c57db120b58fdd35a6bb1af35"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec54d5afa89c19c6dd8541a133be51ee1017a38b412b1321ccb8d6ddbeb4cf7d"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10e4ce0dca9ae7a66e6089bb29355d4432caed736acae36fef0fdd7879f0b0cb"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e507ff1e74373c4d3038195fdd2af30d297b4f0950eeda6f515ae3d84a1770f"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1f059a4d795e646e1c37665b9d06062c62d0e8cc3c511fe01315973a6542e40"}, + {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0721931ad5fe0dda45d07f9820b90b2148ccdd8e45bb9e9b42a146cb4f695649"}, + {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:833616ddc75ad595dee848ad984d067f2f31be645d603e4d158bba656bbf516c"}, + {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:287eb7f54fc81546346207c533ad3c2c51a8d61075127d7f6d79aaf96cdee890"}, + {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:19dfb1c504781a136a80ecd1fff9f16dddf5bb43cec6871778c8a907a085bb3d"}, + {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:119af6e56dce35e8dfb5222573b50c89e5508d94d55713c75126b753f834de68"}, + {file = "regex-2024.5.15-cp311-cp311-win32.whl", hash = "sha256:1c1c174d6ec38d6c8a7504087358ce9213d4332f6293a94fbf5249992ba54efa"}, + {file = "regex-2024.5.15-cp311-cp311-win_amd64.whl", hash = "sha256:9e717956dcfd656f5055cc70996ee2cc82ac5149517fc8e1b60261b907740201"}, + {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:632b01153e5248c134007209b5c6348a544ce96c46005d8456de1d552455b014"}, + {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e64198f6b856d48192bf921421fdd8ad8eb35e179086e99e99f711957ffedd6e"}, + {file = "regex-2024.5.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68811ab14087b2f6e0fc0c2bae9ad689ea3584cad6917fc57be6a48bbd012c49"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8ec0c2fea1e886a19c3bee0cd19d862b3aa75dcdfb42ebe8ed30708df64687a"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0c0c0003c10f54a591d220997dd27d953cd9ccc1a7294b40a4be5312be8797b"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2431b9e263af1953c55abbd3e2efca67ca80a3de8a0437cb58e2421f8184717a"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a605586358893b483976cffc1723fb0f83e526e8f14c6e6614e75919d9862cf"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391d7f7f1e409d192dba8bcd42d3e4cf9e598f3979cdaed6ab11288da88cb9f2"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9ff11639a8d98969c863d4617595eb5425fd12f7c5ef6621a4b74b71ed8726d5"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:4eee78a04e6c67e8391edd4dad3279828dd66ac4b79570ec998e2155d2e59fd5"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8fe45aa3f4aa57faabbc9cb46a93363edd6197cbc43523daea044e9ff2fea83e"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d0a3d8d6acf0c78a1fff0e210d224b821081330b8524e3e2bc5a68ef6ab5803d"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c486b4106066d502495b3025a0a7251bf37ea9540433940a23419461ab9f2a80"}, + {file = "regex-2024.5.15-cp312-cp312-win32.whl", hash = "sha256:c49e15eac7c149f3670b3e27f1f28a2c1ddeccd3a2812cba953e01be2ab9b5fe"}, + {file = "regex-2024.5.15-cp312-cp312-win_amd64.whl", hash = "sha256:673b5a6da4557b975c6c90198588181029c60793835ce02f497ea817ff647cb2"}, + {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:87e2a9c29e672fc65523fb47a90d429b70ef72b901b4e4b1bd42387caf0d6835"}, + {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3bea0ba8b73b71b37ac833a7f3fd53825924165da6a924aec78c13032f20850"}, + {file = "regex-2024.5.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bfc4f82cabe54f1e7f206fd3d30fda143f84a63fe7d64a81558d6e5f2e5aaba9"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5bb9425fe881d578aeca0b2b4b3d314ec88738706f66f219c194d67179337cb"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64c65783e96e563103d641760664125e91bd85d8e49566ee560ded4da0d3e704"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cf2430df4148b08fb4324b848672514b1385ae3807651f3567871f130a728cc3"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5397de3219a8b08ae9540c48f602996aa6b0b65d5a61683e233af8605c42b0f2"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:455705d34b4154a80ead722f4f185b04c4237e8e8e33f265cd0798d0e44825fa"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b2b6f1b3bb6f640c1a92be3bbfbcb18657b125b99ecf141fb3310b5282c7d4ed"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3ad070b823ca5890cab606c940522d05d3d22395d432f4aaaf9d5b1653e47ced"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5b5467acbfc153847d5adb21e21e29847bcb5870e65c94c9206d20eb4e99a384"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:e6662686aeb633ad65be2a42b4cb00178b3fbf7b91878f9446075c404ada552f"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:2b4c884767504c0e2401babe8b5b7aea9148680d2e157fa28f01529d1f7fcf67"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3cd7874d57f13bf70078f1ff02b8b0aa48d5b9ed25fc48547516c6aba36f5741"}, + {file = "regex-2024.5.15-cp38-cp38-win32.whl", hash = "sha256:e4682f5ba31f475d58884045c1a97a860a007d44938c4c0895f41d64481edbc9"}, + {file = "regex-2024.5.15-cp38-cp38-win_amd64.whl", hash = "sha256:d99ceffa25ac45d150e30bd9ed14ec6039f2aad0ffa6bb87a5936f5782fc1569"}, + {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13cdaf31bed30a1e1c2453ef6015aa0983e1366fad2667657dbcac7b02f67133"}, + {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:cac27dcaa821ca271855a32188aa61d12decb6fe45ffe3e722401fe61e323cd1"}, + {file = "regex-2024.5.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7dbe2467273b875ea2de38ded4eba86cbcbc9a1a6d0aa11dcf7bd2e67859c435"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64f18a9a3513a99c4bef0e3efd4c4a5b11228b48aa80743be822b71e132ae4f5"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d347a741ea871c2e278fde6c48f85136c96b8659b632fb57a7d1ce1872547600"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1878b8301ed011704aea4c806a3cadbd76f84dece1ec09cc9e4dc934cfa5d4da"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4babf07ad476aaf7830d77000874d7611704a7fcf68c9c2ad151f5d94ae4bfc4"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35cb514e137cb3488bce23352af3e12fb0dbedd1ee6e60da053c69fb1b29cc6c"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cdd09d47c0b2efee9378679f8510ee6955d329424c659ab3c5e3a6edea696294"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:72d7a99cd6b8f958e85fc6ca5b37c4303294954eac1376535b03c2a43eb72629"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a094801d379ab20c2135529948cb84d417a2169b9bdceda2a36f5f10977ebc16"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c0c18345010870e58238790a6779a1219b4d97bd2e77e1140e8ee5d14df071aa"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:16093f563098448ff6b1fa68170e4acbef94e6b6a4e25e10eae8598bb1694b5d"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e38a7d4e8f633a33b4c7350fbd8bad3b70bf81439ac67ac38916c4a86b465456"}, + {file = "regex-2024.5.15-cp39-cp39-win32.whl", hash = "sha256:71a455a3c584a88f654b64feccc1e25876066c4f5ef26cd6dd711308aa538694"}, + {file = "regex-2024.5.15-cp39-cp39-win_amd64.whl", hash = "sha256:cab12877a9bdafde5500206d1020a584355a97884dfd388af3699e9137bf7388"}, + {file = "regex-2024.5.15.tar.gz", hash = "sha256:d3ee02d9e5f482cc8309134a91eeaacbdd2261ba111b0fef3748eeb4913e6a2c"}, ] [[package]] name = "requests" -version = "2.31.0" +version = "2.32.3" description = "Python HTTP for Humans." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -4553,6 +4561,21 @@ urllib3 = ">=1.25.10" [package.extras] tests = ["coverage (>=3.7.1,<6.0.0)", "flake8", "mypy", "pytest (>=4.6)", "pytest (>=4.6,<5.0)", "pytest-cov", "pytest-localserver", "types-mock", "types-requests", "types-six"] +[[package]] +name = "retry" +version = "0.9.2" +description = "Easy to use retry decorator." 
+optional = false +python-versions = "*" +files = [ + {file = "retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606"}, + {file = "retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4"}, +] + +[package.dependencies] +decorator = ">=3.4.2" +py = ">=1.4.26,<2.0.0" + [[package]] name = "rfc3986" version = "1.5.0" @@ -4591,28 +4614,28 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] [[package]] name = "ruff" -version = "0.4.1" +version = "0.4.8" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.4.1-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:2d9ef6231e3fbdc0b8c72404a1a0c46fd0dcea84efca83beb4681c318ea6a953"}, - {file = "ruff-0.4.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9485f54a7189e6f7433e0058cf8581bee45c31a25cd69009d2a040d1bd4bfaef"}, - {file = "ruff-0.4.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2921ac03ce1383e360e8a95442ffb0d757a6a7ddd9a5be68561a671e0e5807e"}, - {file = "ruff-0.4.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eec8d185fe193ad053eda3a6be23069e0c8ba8c5d20bc5ace6e3b9e37d246d3f"}, - {file = "ruff-0.4.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:baa27d9d72a94574d250f42b7640b3bd2edc4c58ac8ac2778a8c82374bb27984"}, - {file = "ruff-0.4.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f1ee41580bff1a651339eb3337c20c12f4037f6110a36ae4a2d864c52e5ef954"}, - {file = "ruff-0.4.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0926cefb57fc5fced629603fbd1a23d458b25418681d96823992ba975f050c2b"}, - {file = "ruff-0.4.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c6e37f2e3cd74496a74af9a4fa67b547ab3ca137688c484749189bf3a686ceb"}, - {file = "ruff-0.4.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efd703a5975ac1998c2cc5e9494e13b28f31e66c616b0a76e206de2562e0843c"}, - {file = "ruff-0.4.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b92f03b4aa9fa23e1799b40f15f8b95cdc418782a567d6c43def65e1bbb7f1cf"}, - {file = "ruff-0.4.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1c859f294f8633889e7d77de228b203eb0e9a03071b72b5989d89a0cf98ee262"}, - {file = "ruff-0.4.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b34510141e393519a47f2d7b8216fec747ea1f2c81e85f076e9f2910588d4b64"}, - {file = "ruff-0.4.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6e68d248ed688b9d69fd4d18737edcbb79c98b251bba5a2b031ce2470224bdf9"}, - {file = "ruff-0.4.1-py3-none-win32.whl", hash = "sha256:b90506f3d6d1f41f43f9b7b5ff845aeefabed6d2494307bc7b178360a8805252"}, - {file = "ruff-0.4.1-py3-none-win_amd64.whl", hash = "sha256:c7d391e5936af5c9e252743d767c564670dc3889aff460d35c518ee76e4b26d7"}, - {file = "ruff-0.4.1-py3-none-win_arm64.whl", hash = "sha256:a1eaf03d87e6a7cd5e661d36d8c6e874693cb9bc3049d110bc9a97b350680c43"}, - {file = "ruff-0.4.1.tar.gz", hash = "sha256:d592116cdbb65f8b1b7e2a2b48297eb865f6bdc20641879aa9d7b9c11d86db79"}, + {file = "ruff-0.4.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:7663a6d78f6adb0eab270fa9cf1ff2d28618ca3a652b60f2a234d92b9ec89066"}, + {file = "ruff-0.4.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eeceb78da8afb6de0ddada93112869852d04f1cd0f6b80fe464fd4e35c330913"}, + {file = 
"ruff-0.4.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aad360893e92486662ef3be0a339c5ca3c1b109e0134fcd37d534d4be9fb8de3"}, + {file = "ruff-0.4.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:284c2e3f3396fb05f5f803c9fffb53ebbe09a3ebe7dda2929ed8d73ded736deb"}, + {file = "ruff-0.4.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7354f921e3fbe04d2a62d46707e569f9315e1a613307f7311a935743c51a764"}, + {file = "ruff-0.4.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:72584676164e15a68a15778fd1b17c28a519e7a0622161eb2debdcdabdc71883"}, + {file = "ruff-0.4.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9678d5c9b43315f323af2233a04d747409d1e3aa6789620083a82d1066a35199"}, + {file = "ruff-0.4.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704977a658131651a22b5ebeb28b717ef42ac6ee3b11e91dc87b633b5d83142b"}, + {file = "ruff-0.4.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d05f8d6f0c3cce5026cecd83b7a143dcad503045857bc49662f736437380ad45"}, + {file = "ruff-0.4.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6ea874950daca5697309d976c9afba830d3bf0ed66887481d6bca1673fc5b66a"}, + {file = "ruff-0.4.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fc95aac2943ddf360376be9aa3107c8cf9640083940a8c5bd824be692d2216dc"}, + {file = "ruff-0.4.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:384154a1c3f4bf537bac69f33720957ee49ac8d484bfc91720cc94172026ceed"}, + {file = "ruff-0.4.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e9d5ce97cacc99878aa0d084c626a15cd21e6b3d53fd6f9112b7fc485918e1fa"}, + {file = "ruff-0.4.8-py3-none-win32.whl", hash = "sha256:6d795d7639212c2dfd01991259460101c22aabf420d9b943f153ab9d9706e6a9"}, + {file = "ruff-0.4.8-py3-none-win_amd64.whl", hash = "sha256:e14a3a095d07560a9d6769a72f781d73259655919d9b396c650fc98a8157555d"}, + {file = "ruff-0.4.8-py3-none-win_arm64.whl", hash = "sha256:14019a06dbe29b608f6b7cbcec300e3170a8d86efaddb7b23405cb7f7dcaf780"}, + {file = "ruff-0.4.8.tar.gz", hash = "sha256:16d717b1d57b2e2fd68bd0bf80fb43931b79d05a7131aa477d66fc40fbd86268"}, ] [[package]] @@ -4627,19 +4650,18 @@ files = [ [[package]] name = "setuptools" -version = "69.5.1" +version = "70.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = true python-versions = ">=3.8" files = [ - {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, - {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, + {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, + {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", 
"pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "singledispatch" @@ -4669,17 +4691,17 @@ files = [ [[package]] name = "slack-sdk" -version = "3.27.1" +version = "3.28.0" description = "The Slack API Platform SDK for Python" optional = false python-versions = ">=3.6" files = [ - {file = "slack_sdk-3.27.1-py2.py3-none-any.whl", hash = "sha256:c108e509160cf1324c5c8b1f47ca52fb5e287021b8caf9f4ec78ad737ab7b1d9"}, - {file = "slack_sdk-3.27.1.tar.gz", hash = "sha256:85d86b34d807c26c8bb33c1569ec0985876f06ae4a2692afba765b7a5490d28c"}, + {file = "slack_sdk-3.28.0-py2.py3-none-any.whl", hash = "sha256:1a47700ae20566575ce494d1d1b6f594b011d06aad28e3b8e28c052cad1d6c4c"}, + {file = "slack_sdk-3.28.0.tar.gz", hash = "sha256:e6ece5cb70850492637e002e3b0d26d307939f4a33203b88cb274f7475c9a144"}, ] [package.extras] -optional = ["SQLAlchemy (>=1.4,<3)", "aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "websocket-client (>=1,<2)", "websockets (>=10,<11)", "websockets (>=9.1,<10)"] +optional = ["SQLAlchemy (>=1.4,<3)", "aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "websocket-client (>=1,<2)", "websockets (>=9.1,<13)"] [[package]] name = "smmap" @@ -4716,13 +4738,13 @@ files = [ [[package]] name = "social-auth-app-django" -version = "5.4.0" +version = "5.4.1" description = "Python Social Authentication, Django integration." optional = false python-versions = ">=3.8" files = [ - {file = "social-auth-app-django-5.4.0.tar.gz", hash = "sha256:09ac02a063cb313eed5e9ef2f9ac4477c8bf5bbd685925ff3aba43f9072f1bbb"}, - {file = "social_auth_app_django-5.4.0-py3-none-any.whl", hash = "sha256:28c65b2e2092f30cdb3cf912eeaa6988b49fdf4001b29bd89e683673d700a38e"}, + {file = "social-auth-app-django-5.4.1.tar.gz", hash = "sha256:2a43cde559dd34fdc7132417b6c52c780fa99ec2332dee9f405b4763f371c367"}, + {file = "social_auth_app_django-5.4.1-py3-none-any.whl", hash = "sha256:7519f186c63c50f2d364457b236f051338d194bcface55e318a6a705c5213477"}, ] [package.dependencies] @@ -4731,13 +4753,13 @@ social-auth-core = ">=4.4.1" [[package]] name = "social-auth-core" -version = "4.5.3" +version = "4.5.4" description = "Python social authentication made simple." 
optional = false python-versions = ">=3.8" files = [ - {file = "social-auth-core-4.5.3.tar.gz", hash = "sha256:9d9b51b7ce2ccd0b7139e6b7f52a32cb922726de819fb13babe35f12ae89852a"}, - {file = "social_auth_core-4.5.3-py3-none-any.whl", hash = "sha256:8d16e66eb97bb7be43a023d6efa16628cdc94cefd8d8053930c98a0f676867e7"}, + {file = "social-auth-core-4.5.4.tar.gz", hash = "sha256:d3dbeb0999ffd0e68aa4bd73f2ac698a18133fd11b3fc890e1366f18c8889fac"}, + {file = "social_auth_core-4.5.4-py3-none-any.whl", hash = "sha256:33cf970a623c442376f9d4a86fb187579e4438649daa5b5be993d05e74d7b2db"}, ] [package.dependencies] @@ -5015,13 +5037,13 @@ files = [ [[package]] name = "tomlkit" -version = "0.12.4" +version = "0.12.5" description = "Style preserving TOML library" optional = false python-versions = ">=3.7" files = [ - {file = "tomlkit-0.12.4-py3-none-any.whl", hash = "sha256:5cd82d48a3dd89dee1f9d64420aa20ae65cfbd00668d6f094d7578a78efbb77b"}, - {file = "tomlkit-0.12.4.tar.gz", hash = "sha256:7ca1cfc12232806517a8515047ba66a19369e71edf2439d0f5824f91032b6cc3"}, + {file = "tomlkit-0.12.5-py3-none-any.whl", hash = "sha256:af914f5a9c59ed9d0762c7b64d3b5d5df007448eb9cd2edc8a46b1eafead172f"}, + {file = "tomlkit-0.12.5.tar.gz", hash = "sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c"}, ] [[package]] @@ -5085,13 +5107,13 @@ files = [ [[package]] name = "types-requests" -version = "2.31.0.20240406" +version = "2.32.0.20240602" description = "Typing stubs for requests" optional = true python-versions = ">=3.8" files = [ - {file = "types-requests-2.31.0.20240406.tar.gz", hash = "sha256:4428df33c5503945c74b3f42e82b181e86ec7b724620419a2966e2de604ce1a1"}, - {file = "types_requests-2.31.0.20240406-py3-none-any.whl", hash = "sha256:6216cdac377c6b9a040ac1c0404f7284bd13199c0e1bb235f4324627e8898cf5"}, + {file = "types-requests-2.32.0.20240602.tar.gz", hash = "sha256:3f98d7bbd0dd94ebd10ff43a7fbe20c3b8528acace6d8efafef0b6a184793f06"}, + {file = "types_requests-2.32.0.20240602-py3-none-any.whl", hash = "sha256:ed3946063ea9fbc6b5fc0c44fa279188bae42d582cb63760be6cb4b9d06c3de8"}, ] [package.dependencies] @@ -5110,13 +5132,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.11.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, - {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] @@ -5171,40 +5193,43 @@ files = [ [[package]] name = "watchdog" -version = "4.0.0" +version = "4.0.1" description = "Filesystem events monitoring" optional = false python-versions = ">=3.8" files = [ - {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, - {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, - {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, - {file = 
"watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, - {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, - {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, - {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, - {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, - {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, - {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = 
"sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, - {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, + {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:da2dfdaa8006eb6a71051795856bedd97e5b03e57da96f98e375682c48850645"}, + {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e93f451f2dfa433d97765ca2634628b789b49ba8b504fdde5837cdcf25fdb53b"}, + {file = "watchdog-4.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ef0107bbb6a55f5be727cfc2ef945d5676b97bffb8425650dadbb184be9f9a2b"}, + {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:17e32f147d8bf9657e0922c0940bcde863b894cd871dbb694beb6704cfbd2fb5"}, + {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03e70d2df2258fb6cb0e95bbdbe06c16e608af94a3ffbd2b90c3f1e83eb10767"}, + {file = "watchdog-4.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:123587af84260c991dc5f62a6e7ef3d1c57dfddc99faacee508c71d287248459"}, + {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:093b23e6906a8b97051191a4a0c73a77ecc958121d42346274c6af6520dec175"}, + {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:611be3904f9843f0529c35a3ff3fd617449463cb4b73b1633950b3d97fa4bfb7"}, + {file = "watchdog-4.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62c613ad689ddcb11707f030e722fa929f322ef7e4f18f5335d2b73c61a85c28"}, + {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d4925e4bf7b9bddd1c3de13c9b8a2cdb89a468f640e66fbfabaf735bd85b3e35"}, + {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cad0bbd66cd59fc474b4a4376bc5ac3fc698723510cbb64091c2a793b18654db"}, + {file = "watchdog-4.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a3c2c317a8fb53e5b3d25790553796105501a235343f5d2bf23bb8649c2c8709"}, + {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c9904904b6564d4ee8a1ed820db76185a3c96e05560c776c79a6ce5ab71888ba"}, + {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:667f3c579e813fcbad1b784db7a1aaa96524bed53437e119f6a2f5de4db04235"}, + {file = "watchdog-4.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d10a681c9a1d5a77e75c48a3b8e1a9f2ae2928eda463e8d33660437705659682"}, + {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0144c0ea9997b92615af1d94afc0c217e07ce2c14912c7b1a5731776329fcfc7"}, + {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:998d2be6976a0ee3a81fb8e2777900c28641fb5bfbd0c84717d89bca0addcdc5"}, + {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e7921319fe4430b11278d924ef66d4daa469fafb1da679a2e48c935fa27af193"}, + {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f0de0f284248ab40188f23380b03b59126d1479cd59940f2a34f8852db710625"}, + {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bca36be5707e81b9e6ce3208d92d95540d4ca244c006b61511753583c81c70dd"}, + {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ab998f567ebdf6b1da7dc1e5accfaa7c6992244629c0fdaef062f43249bd8dee"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dddba7ca1c807045323b6af4ff80f5ddc4d654c8bce8317dde1bd96b128ed253"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_armv7l.whl", hash = 
"sha256:4513ec234c68b14d4161440e07f995f231be21a09329051e67a2118a7a612d2d"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_i686.whl", hash = "sha256:4107ac5ab936a63952dea2a46a734a23230aa2f6f9db1291bf171dac3ebd53c6"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:6e8c70d2cd745daec2a08734d9f63092b793ad97612470a0ee4cbb8f5f705c57"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f27279d060e2ab24c0aa98363ff906d2386aa6c4dc2f1a374655d4e02a6c5e5e"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:f8affdf3c0f0466e69f5b3917cdd042f89c8c63aebdb9f7c078996f607cdb0f5"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ac7041b385f04c047fcc2951dc001671dee1b7e0615cde772e84b01fbf68ee84"}, + {file = "watchdog-4.0.1-py3-none-win32.whl", hash = "sha256:206afc3d964f9a233e6ad34618ec60b9837d0582b500b63687e34011e15bb429"}, + {file = "watchdog-4.0.1-py3-none-win_amd64.whl", hash = "sha256:7577b3c43e5909623149f76b099ac49a1a01ca4e167d1785c76eb52fa585745a"}, + {file = "watchdog-4.0.1-py3-none-win_ia64.whl", hash = "sha256:d7b9f5f3299e8dd230880b6c55504a1f69cf1e4316275d1b215ebdd8187ec88d"}, + {file = "watchdog-4.0.1.tar.gz", hash = "sha256:eebaacf674fa25511e8867028d281e602ee6500045b57f43b08778082f7f8b44"}, ] [package.extras] @@ -5375,18 +5400,18 @@ multidict = ">=4.0" [[package]] name = "zipp" -version = "3.18.1" +version = "3.19.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, - {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, + {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, + {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [extras] aci = ["PyYAML"] @@ -5402,4 +5427,4 @@ servicenow = ["Jinja2", "PyYAML", "ijson", "oauthlib", "python-magic", "pytz", " [metadata] lock-version = "2.0" python-versions = ">=3.8,<3.12" -content-hash = "cd710f5c319a47e01c925b15ded4986b7dd40575ee65813234016f2511ffbbc6" +content-hash = "23849f65deb66d9d73f56dfeb1d7d4266f4f3c7cc9dda8f8b6b6bfe67e51755c" diff --git a/pyproject.toml b/pyproject.toml index 9eedc577f..7623bfa18 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -51,6 +51,7 @@ requests = { version = ">=2.21.0", optional = true } requests-oauthlib = { version = ">=1.3.0", optional = true } six = { version = ">=1.13.0", optional = true } httpx = { version = ">=0.23.3", optional = true } +retry = "^0.9.2" 
[tool.poetry.group.dev.dependencies] bandit = "*" From 3de4b33ab4060a2dcf0dbfb3cbbcc42a06df4648 Mon Sep 17 00:00:00 2001 From: jtdub Date: Mon, 15 Apr 2024 16:43:17 -0500 Subject: [PATCH 188/229] add device fixtures --- .../integrations/itential/clients.py | 6 +- nautobot_ssot/tests/itential/fixtures/base.py | 65 ++++++++++++++ .../tests/itential/fixtures/devices.py | 87 +++++++++++++++++++ .../tests/itential/fixtures/gateways.py | 78 ++++++++++++++--- .../tests/itential/fixtures/logger.py | 6 +- nautobot_ssot/tests/itential/test_clients.py | 46 +--------- nautobot_ssot/tests/itential/test_diffsync.py | 9 ++ 7 files changed, 234 insertions(+), 63 deletions(-) create mode 100644 nautobot_ssot/tests/itential/fixtures/base.py create mode 100644 nautobot_ssot/tests/itential/fixtures/devices.py create mode 100644 nautobot_ssot/tests/itential/test_diffsync.py diff --git a/nautobot_ssot/integrations/itential/clients.py b/nautobot_ssot/integrations/itential/clients.py index 913fb5a25..78c341652 100644 --- a/nautobot_ssot/integrations/itential/clients.py +++ b/nautobot_ssot/integrations/itential/clients.py @@ -170,8 +170,7 @@ def update_device( Returns: dict: API client return message. """ - payload = {"name": device_name, "variables": variables} - response = self._put(uri=f"devices/{device_name}", json_data=payload) + response = self._put(uri=f"devices/{device_name}", json_data=variables) if response.ok: self.job.log_info(message=f"Updating {device_name} on {self.host}.") return response.json() @@ -247,8 +246,7 @@ def update_group(self, group_name: str, variables: Optional[dict]) -> Union[requ Returns: dict: API client return message. """ - payload = {"name": group_name, "variables": variables} - response = self._put(uri=f"groups/{group_name}", json_data=payload) + response = self._put(uri=f"groups/{group_name}", json_data=variables) if response.ok: self.job.log_info(message=f"Updating {group_name} on {self.host}.") return response.json() diff --git a/nautobot_ssot/tests/itential/fixtures/base.py b/nautobot_ssot/tests/itential/fixtures/base.py new file mode 100644 index 000000000..9cee00056 --- /dev/null +++ b/nautobot_ssot/tests/itential/fixtures/base.py @@ -0,0 +1,65 @@ +"""Itential SSoT Base TestCase.""" + +import os +import requests_mock +from unittest import TestCase + +from nautobot_ssot.integrations.itential.models import AutomationGatewayModel +from nautobot_ssot.tests.itential.fixtures import gateways, logger, urls, clients, devices + + +class ItentialSSoTBaseTestCase(TestCase): + """Itential Automation Gateway Client Test Cases.""" + + def setUp(self): + """Setup test cases.""" + self.job = logger.JobLogger() + self.requests_mock = requests_mock.Mocker() + self.requests_mock.start() + + for device in gateways.gateways: + os.environ[device.get("username_env")] = "testUser" + os.environ[device.get("password_env")] = "testPass" + + gateways.update_or_create_automation_gateways( + name=device.get("name"), + description=device.get("description"), + location=device.get("location"), + region=device.get("region"), + gateway=device.get("gateway"), + enabled=device.get("enabled"), + username_env=device.get("username_env"), + password_env=device.get("password_env"), + secret_group=device.get("secret_group"), + ) + + for url_item in urls.data: + self.requests_mock.register_uri( + method=url_item.get("method"), + url=url_item.get("url"), + json=url_item.get("json"), + status_code=url_item.get("status_code", 200), + headers=url_item.get("headers", {}), + cookies=url_item.get("cookies", {}), + ) + + 
for device in devices.data: + devices.update_or_create_device_object( + status=device.get("status"), + role=device.get("role"), + name=device.get("name"), + location=device.get("location"), + manufacturer=device.get("manufacturer"), + platform=device.get("platform"), + network_driver=device.get("network_driver"), + model=device.get("model"), + interface=device.get("interface"), + ip_address=device.get("ip_address"), + ) + + self.gateway = AutomationGatewayModel.objects.first() + self.client = clients.api_client(self.gateway) + + def tearDown(self): + """Teardown test cases.""" + self.requests_mock.stop() diff --git a/nautobot_ssot/tests/itential/fixtures/devices.py b/nautobot_ssot/tests/itential/fixtures/devices.py new file mode 100644 index 000000000..b2cc83fab --- /dev/null +++ b/nautobot_ssot/tests/itential/fixtures/devices.py @@ -0,0 +1,87 @@ +"""Itential SsoT Nautobot device fixtures.""" + +from django.contrib.contenttypes.models import ContentType + +from nautobot.dcim.models import ( + Location, + LocationType, + Manufacturer, + Platform, + Device, + DeviceType, + Interface, +) +from nautobot.extras.models import Status, Role +from nautobot.ipam.models import Preifx, IPAddress, Namespace + + +data = [ + { + "name": "rtr1.example.net", + "location": "USA", + "model": "Cisco 2901", + "interface": "gigabitEthernet0/1", + "ip_address": "192.0.2.1", + "platform": "Cisco IOS", + "network_driver": "cisco_ios", + "role": "Router", + "status": "Active", + }, + { + "name": "rtr2.example.net", + "location": "USA", + "model": "Cisco 2901", + "interface": "gigabitEthernet0/1", + "ip_address": None, + "platform": "Cisco IOS", + "network_driver": "cisco_ios", + "role": "Router", + "status": "Active", + }, +] + + +def add_content_type(model: object, content_type: object, changed: bool): + """Add a content type to a model.""" + + if changed: + model.content_types.add(content_type) + + model.save() + + +def update_or_create_device_object( + status: str, + role: str, + name: str, + location: str, + manufacturer: str, + platform: str, + network_driver: str, + model: str, + interface: str, + ip_address: str, +): + """Create or update device fixtures.""" + status = Status.objects.get(name="Active") + namespace = Namespace.objects.get(name="Global") + ip_prefix, _ = Prefix.objects.update_or_create(prefix="192.0.2.0/24", namespace=namespace, status=status) + device_content_type = ContentType.objects.get_for_model(Device) + role, role_changed = Role.objects.update_or_create(name=role) + add_content_type(model=role, content_type=device_content_type, changed=role_changed) + location_type, location_type_changed = LocationType.objects.get_or_create(name="Region") + add_content_type(model=location_type, content_type=device_content_type, changed=location_type_changed) + location, _ = Location.objects.get_or_create(name=location, location_type=location_type, status=status) + manufacturer, _ = Manufacturer.objects.update_or_create(name=manufacturer) + platform, _ = Platform.objects.update_or_create( + name=platform, manufacturer=manufacturer, network_driver=network_driver + ) + device_type, _ = DeviceType.objects.update_or_create(manufacturer=manufacturer, model=model) + device, _ = Device.objects.update_or_create( + name=name, role=role, device_type=device_type, location=location, status=status, platform=platform + ) + interface, _ = Interface.objects.update_or_create(name=interace, status=status, device=device) + + if ip_address: + ip_address, _ = IPAddress.objects.update_or_create(host=ip_address, 
mask_length=32, status=status) + ip_address.primary_ip4_for(device) diff --git a/nautobot_ssot/tests/itential/fixtures/gateways.py b/nautobot_ssot/tests/itential/fixtures/gateways.py index 78ec4e7c3..9203d71a2 100644 --- a/nautobot_ssot/tests/itential/fixtures/gateways.py +++ b/nautobot_ssot/tests/itential/fixtures/gateways.py @@ -44,18 +44,72 @@ "hostname": "https://iag1.example.com:8443", "responses": { "login": {"token": "abc123="}, - "logout": {}, - "poll": {}, - "get_devices": {}, - "get_device": {}, - "create_device": {}, - "update_device": {}, - "delete_device": {}, - "get_groups": {}, - "get_group": {}, - "create_group": {}, - "update_group": {}, - "delete_group": {}, + "logout": "User was successfully logged out of session", + "poll": {"success": True, "hostname": "localhost", "serverId": "00:00:00:00:00:00:8443"}, + "get_devices": { + "meta": { + "count": 1, + "query_object": {"offset": None, "limit": None, "filter": None, "order": "ascending"}, + "total_count": 1, + }, + "data": [ + { + "name": "rtr1.example.net", + "variables": { + "ansible_host": "192.0.2.1", + "ansible_network_os": "cisco.ios.ios", + "ansible_connection": "ansible.netcommon.network_cli", + "ansible_port": 22, + }, + } + ], + }, + "get_device": { + "name": "rtr1.example.net", + "variables": { + "ansible_host": "192.0.2.1", + "ansible_network_os": "cisco.ios.ios", + "ansible_connection": "ansible.netcommon.network_cli", + "ansible_port": 22, + }, + }, + "create_device": {"name": "rtr10.example.net", "variables": {"ansible_host": "192.0.2.10"}}, + "update_device": {"name": "rtr10.example.net", "variables": {"ansible_host": "192.0.2.10"}}, + "delete_device": {"code": 200, "status": 200, "message": "deleted"}, + "get_groups": { + "meta": { + "count": 1, + "query_object": {"offset": None, "limit": None, "filter": None, "order": "ascending"}, + "total_count": 1, + }, + "data": [ + { + "name": "rtr1.example.net", + "variables": {"ansible_user": "testUser", "ansible_password": "testPass"}, + "devices": ["rtr1.example.net"], + "childGroups": [], + } + ], + }, + "get_group": { + "name": "rtr1.example.net", + "variables": {"ansible_user": "testUser", "ansible_password": "testPass"}, + "devices": ["rtr1.example.net"], + "childGroups": [], + }, + "create_group": { + "name": "test-group", + "variables": {}, + "devices": [], + "childGroups": [], + }, + "update_group": { + "name": "test-group", + "variables": {"key": "value"}, + "devices": [], + "childGroups": [], + }, + "delete_group": {"code": 200, "status": 200, "message": "deleted"}, }, }, } diff --git a/nautobot_ssot/tests/itential/fixtures/logger.py b/nautobot_ssot/tests/itential/fixtures/logger.py index 6b5ec5e51..4718d2e55 100644 --- a/nautobot_ssot/tests/itential/fixtures/logger.py +++ b/nautobot_ssot/tests/itential/fixtures/logger.py @@ -6,14 +6,14 @@ class JobLogger: """Job Logger.""" - def log_info(message: str): + def log_info(self, message: str): """Info logging.""" logging.info(message) - def log_warning(message: str): + def log_warning(self, message: str): """Warning logging.""" logging.warning(message) - def log_failure(message: str): + def log_failure(self, message: str): """Failure logging.""" logging.error(message) diff --git a/nautobot_ssot/tests/itential/test_clients.py b/nautobot_ssot/tests/itential/test_clients.py index d745d3c08..e18207023 100644 --- a/nautobot_ssot/tests/itential/test_clients.py +++ b/nautobot_ssot/tests/itential/test_clients.py @@ -1,49 +1,11 @@ """Itential SSoT API Client Tests.""" -import os -import requests_mock -from unittest 
import TestCase +from nautobot_ssot.tests.itential.fixtures.base import ItentialSSoTBaseTestCase -from nautobot_ssot.integrations.itential.models import AutomationGatewayModel -from nautobot_ssot.tests.itential.fixtures import gateways, logger, urls, clients - -class AutomationGatewayClientTestCase(TestCase): +class AutomationGatewayClientTestCase(ItentialSSoTBaseTestCase): """Itential Automation Gateway Client Test Cases.""" - def setUp(self): - """Setup test cases.""" - self.job = logger.JobLogger() - self.requests_mock = requests_mock.Mocker() - self.requests_mock.start() - - for device in gateways.gateways: - os.environ[device.get("username_env")] = "testUser" - os.environ[device.get("password_env")] = "testPass" - - gateways.update_or_create_automation_gateways( - name=device.get("name"), - description=device.get("description"), - location=device.get("location"), - region=device.get("region"), - gateway=device.get("gateway"), - enabled=device.get("enabled"), - username_env=device.get("username_env"), - password_env=device.get("password_env"), - secret_group=device.get("secret_group"), - ) - - for url_item in urls.data: - self.requests_mock.register_uri( - method=url_item.get("method"), - url=url_item.get("url"), - json=url_item.get("json"), - status_code=url_item.get("status_code", 200), - ) - - self.gateway = AutomationGatewayModel.objects.first() - self.client = clients.api_client(self.gateway) - def test_login__success(self): """Test API client login.""" response = self.client.login() @@ -103,7 +65,3 @@ def test_logout__success(self): """Test API client logout.""" response = self.client.logout() self.assertEquals(response, gateways.responses["iag1"]["responses"].get("logout")) - - def tearDown(self): - """Teardown test cases.""" - self.requests_mock.stop() diff --git a/nautobot_ssot/tests/itential/test_diffsync.py b/nautobot_ssot/tests/itential/test_diffsync.py new file mode 100644 index 000000000..79063a398 --- /dev/null +++ b/nautobot_ssot/tests/itential/test_diffsync.py @@ -0,0 +1,9 @@ +"""Itential SSoT DiffSync tests.""" + +from nautobot_ssot.tests.itential.fixtures.base import ItentialSSoTBaseTestCase + + +class DiffSyncTestCases(ItentialSSoTBaseTestCase): + """DiffSync test cases.""" + + pass From 02a564ebc8990912dbcf41e962b8ac367c6f1185 Mon Sep 17 00:00:00 2001 From: jtdub Date: Mon, 15 Apr 2024 17:13:24 -0500 Subject: [PATCH 189/229] update fixtures --- nautobot_ssot/tests/itential/fixtures/devices.py | 12 +++++++----- nautobot_ssot/tests/itential/fixtures/logger.py | 6 +++--- nautobot_ssot/tests/itential/test_clients.py | 1 + 3 files changed, 11 insertions(+), 8 deletions(-) diff --git a/nautobot_ssot/tests/itential/fixtures/devices.py b/nautobot_ssot/tests/itential/fixtures/devices.py index b2cc83fab..a0a9e7419 100644 --- a/nautobot_ssot/tests/itential/fixtures/devices.py +++ b/nautobot_ssot/tests/itential/fixtures/devices.py @@ -12,13 +12,14 @@ Interface, ) from nautobot.extras.models import Status, Role -from nautobot.ipam.models import Preifx, IPAddress, Namespace +from nautobot.ipam.models import Prefix, IPAddress, Namespace data = [ { "name": "rtr1.example.net", - "location": "USA", + "location": "North America", + "manufacturer": "Cisco", "model": "Cisco 2901", "interface": "gigabitEthernet0/1", "ip_address": "192.0.2.1", @@ -29,7 +30,8 @@ }, { "name": "rtr2.example.net", - "location": "USA", + "location": "North America", + "manufacturer": "Cisco", "model": "Cisco 2901", "interface": "gigabitEthernet0/1", "ip_address": None, @@ -80,8 +82,8 @@ def 
update_or_create_device_object( device, _ = Device.objects.update_or_create( name=name, role=role, device_type=device_type, location=location, status=status, platform=platform ) - interface, _ = Interface.objects.update_or_create(name=interace, status=status, device=device) + interface, _ = Interface.objects.update_or_create(name=interface, status=status, device=device) if ip_address: ip_address, _ = IPAddress.objects.update_or_create(host=ip_address, mask_length=32, status=status) - ip_address.primary_ip4_for(device) + ip_address.primary_ip4_for.add(device) diff --git a/nautobot_ssot/tests/itential/fixtures/logger.py b/nautobot_ssot/tests/itential/fixtures/logger.py index 4718d2e55..6b5ec5e51 100644 --- a/nautobot_ssot/tests/itential/fixtures/logger.py +++ b/nautobot_ssot/tests/itential/fixtures/logger.py @@ -6,14 +6,14 @@ class JobLogger: """Job Logger.""" - def log_info(self, message: str): + def log_info(message: str): """Info logging.""" logging.info(message) - def log_warning(self, message: str): + def log_warning(message: str): """Warning logging.""" logging.warning(message) - def log_failure(self, message: str): + def log_failure(message: str): """Failure logging.""" logging.error(message) diff --git a/nautobot_ssot/tests/itential/test_clients.py b/nautobot_ssot/tests/itential/test_clients.py index e18207023..9e1ad5cb0 100644 --- a/nautobot_ssot/tests/itential/test_clients.py +++ b/nautobot_ssot/tests/itential/test_clients.py @@ -1,6 +1,7 @@ """Itential SSoT API Client Tests.""" from nautobot_ssot.tests.itential.fixtures.base import ItentialSSoTBaseTestCase +from nautobot_ssot.tests.itential.fixtures import gateways class AutomationGatewayClientTestCase(ItentialSSoTBaseTestCase): From 355963e279310c87585c257df3e68507eb4208aa Mon Sep 17 00:00:00 2001 From: jtdub Date: Mon, 15 Apr 2024 21:17:50 -0500 Subject: [PATCH 190/229] initial diffsync test --- .../itential/diffsync/adapters/itential.py | 3 ++- .../itential/diffsync/adapters/nautobot.py | 11 ++++++----- nautobot_ssot/tests/itential/fixtures/base.py | 17 ++++++++++++++--- .../tests/itential/fixtures/clients.py | 2 +- nautobot_ssot/tests/itential/fixtures/logger.py | 6 +++--- nautobot_ssot/tests/itential/test_diffsync.py | 9 ++++++--- 6 files changed, 32 insertions(+), 16 deletions(-) diff --git a/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py b/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py index b0a5b289b..e28c33922 100644 --- a/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py +++ b/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py @@ -13,12 +13,13 @@ class ItentialAnsibleDeviceAdapter(DiffSync): def __init__(self, api_client: AutomationGatewayClient, job: object, *args, **kwargs): """Initialize Diffsync Adapter.""" + super().__init__(*args, **kwargs) self.api_client = api_client self.job = job def load(self): """Load Adapter.""" - self.job.load_info(message=f"Loading Itential devices from {self.api_client.host} into Diffsync adapter.") + self.job.log_info(message=f"Loading Itential devices from {self.api_client.host} into Diffsync adapter.") devices = self.api_client.get_devices().get("data") for iag_device in devices: diff --git a/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py index f2f762332..fd0892ef8 100644 --- a/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py @@ -10,14 +10,15 @@ 
from nautobot.dcim.models import Device, Location -class NautobotAnsibleAdapter(DiffSync): +class NautobotAnsibleDeviceAdapter(DiffSync): """Nautobot => Itential Ansible Device Diffsync Adapter.""" device = NautobotAnsibleDeviceModel top_level = ["device"] - def __init__(self, job: object, location: str, location_descendants: bool): + def __init__(self, job: object, location: str, location_descendants: bool, *args, **kwargs): """Initialize Nautobot Itential Ansible Device Diffsync adapter.""" + super().__init__(*args, **kwargs) self.job = job self.location = location self.location_descendants = location_descendants @@ -49,7 +50,7 @@ def _ansible_vars(device_obj: Device) -> dict: else: ansible_network_os = {} - ansible_host = {"ansible_host": device_obj.primary_ipv4.host} + ansible_host = {"ansible_host": device_obj.primary_ip4.host} config_context = device_obj.get_config_context() return {**ansible_host, **ansible_network_os, **config_context} @@ -58,10 +59,10 @@ def load(self): """Load Nautobot Diffsync adapter.""" self.job.log_info(message="Loading locations from Nautobot.") location = Location.objects.get(name=self.location) - locations = location.get_descendants(include_self=True) if self.location_descendants else location + locations = location.descendants(include_self=True) if self.location_descendants else location self.job.log_info(message="Loading devices from Nautobot.") - devices = Device.objects.filter(location__in=locations).exclude(primary_ipv4=None) + devices = Device.objects.filter(location__in=locations).exclude(primary_ip4=None) for nb_device in devices: try: diff --git a/nautobot_ssot/tests/itential/fixtures/base.py b/nautobot_ssot/tests/itential/fixtures/base.py index 9cee00056..f44a90f73 100644 --- a/nautobot_ssot/tests/itential/fixtures/base.py +++ b/nautobot_ssot/tests/itential/fixtures/base.py @@ -2,10 +2,13 @@ import os import requests_mock -from unittest import TestCase + +from nautobot.apps.testing import TestCase from nautobot_ssot.integrations.itential.models import AutomationGatewayModel -from nautobot_ssot.tests.itential.fixtures import gateways, logger, urls, clients, devices +from nautobot_ssot.integrations.itential.diffsync.adapters import itential, nautobot +from nautobot_ssot.tests.itential.fixtures import gateways, urls, clients, devices +from nautobot_ssot.tests.itential.fixtures.logger import JobLogger class ItentialSSoTBaseTestCase(TestCase): @@ -13,7 +16,8 @@ class ItentialSSoTBaseTestCase(TestCase): def setUp(self): """Setup test cases.""" - self.job = logger.JobLogger() + self.job = JobLogger() + self.job.log_info(message="test") self.requests_mock = requests_mock.Mocker() self.requests_mock.start() @@ -59,6 +63,13 @@ def setUp(self): self.gateway = AutomationGatewayModel.objects.first() self.client = clients.api_client(self.gateway) + self.itential_adapter = itential.ItentialAnsibleDeviceAdapter(api_client=self.client, job=self.job) + self.nautobot_adapter = nautobot.NautobotAnsibleDeviceAdapter( + job=self.job, location="North America", location_descendants=True + ) + + self.itential_adapter.load() + self.nautobot_adapter.load() def tearDown(self): """Teardown test cases.""" diff --git a/nautobot_ssot/tests/itential/fixtures/clients.py b/nautobot_ssot/tests/itential/fixtures/clients.py index 51dc51fac..ff2fb96f8 100644 --- a/nautobot_ssot/tests/itential/fixtures/clients.py +++ b/nautobot_ssot/tests/itential/fixtures/clients.py @@ -8,7 +8,7 @@ from nautobot_ssot.integrations.itential.clients import AutomationGatewayClient -def 
api_client(device_obj: AutomationGatewayModel, job: object = logger.JobLogger) -> AutomationGatewayClient: +def api_client(device_obj: AutomationGatewayModel, job: object = logger.JobLogger()) -> AutomationGatewayClient: """Initialize API Client.""" return AutomationGatewayClient( diff --git a/nautobot_ssot/tests/itential/fixtures/logger.py b/nautobot_ssot/tests/itential/fixtures/logger.py index 6b5ec5e51..4718d2e55 100644 --- a/nautobot_ssot/tests/itential/fixtures/logger.py +++ b/nautobot_ssot/tests/itential/fixtures/logger.py @@ -6,14 +6,14 @@ class JobLogger: """Job Logger.""" - def log_info(message: str): + def log_info(self, message: str): """Info logging.""" logging.info(message) - def log_warning(message: str): + def log_warning(self, message: str): """Warning logging.""" logging.warning(message) - def log_failure(message: str): + def log_failure(self, message: str): """Failure logging.""" logging.error(message) diff --git a/nautobot_ssot/tests/itential/test_diffsync.py b/nautobot_ssot/tests/itential/test_diffsync.py index 79063a398..9549da903 100644 --- a/nautobot_ssot/tests/itential/test_diffsync.py +++ b/nautobot_ssot/tests/itential/test_diffsync.py @@ -1,9 +1,12 @@ """Itential SSoT DiffSync tests.""" -from nautobot_ssot.tests.itential.fixtures.base import ItentialSSoTBaseTestCase +from nautobot_ssot.tests.itential.fixtures import base -class DiffSyncTestCases(ItentialSSoTBaseTestCase): +class DiffSyncTestCases(base.ItentialSSoTBaseTestCase): """DiffSync test cases.""" - pass + def test_inventory_diff(self): + """Test diff exists.""" + diff = self.nautobot_adapter.diff_to(self.itential_adapter) + self.assertTrue(diff.has_diffs()) From d119757a9f202f4fe8aea55110a2313d53351acf Mon Sep 17 00:00:00 2001 From: jtdub Date: Mon, 15 Apr 2024 22:10:27 -0500 Subject: [PATCH 191/229] diffsync tests --- .../integrations/itential/clients.py | 37 +++++++++++++++++++ .../itential/diffsync/adapters/nautobot.py | 2 +- .../itential/diffsync/models/itential.py | 2 +- .../tests/itential/fixtures/devices.py | 17 +++++++++ .../tests/itential/fixtures/gateways.py | 17 +++++++-- nautobot_ssot/tests/itential/fixtures/urls.py | 10 +++++ nautobot_ssot/tests/itential/test_diffsync.py | 5 +++ 7 files changed, 85 insertions(+), 5 deletions(-) diff --git a/nautobot_ssot/integrations/itential/clients.py b/nautobot_ssot/integrations/itential/clients.py index 78c341652..2ceca9368 100644 --- a/nautobot_ssot/integrations/itential/clients.py +++ b/nautobot_ssot/integrations/itential/clients.py @@ -268,3 +268,40 @@ def delete_group(self, group_name: str) -> Union[requests.Response, requests.HTT return response.json() self.job.log_warning(message=f"Failed to delete {group_name} on {self.host}.") return response.raise_for_status() + + def add_device_to_group(self, group_name: str, device_name: str) -> Union[requests.Response, requests.HTTPError]: + """Add a device to a group. + + Args: + group_name (str): Group name. + device_name (str): Device name. + + Returns: + Union[requests.Response, requests.HTTPError]: API client return message. 
+ """ + device_name = [device_name] + response = self._post(uri=f"groups/{group_name}/devices", json_data=device_name) + if response.ok: + self.job.log_info(message=f"Adding {device_name} to {group_name} group on {self.host}.") + return response.json() + self.job.log_warning(message=f"Failed to add {device_name} to {group_name} group on {self.host}.") + return response.raise_for_status() + + def delete_device_from_group( + self, group_name: str, device_name: str + ) -> Union[requests.Response, requests.HTTPError]: + """Delete a device from a group. + + Args: + group_name (str): Group name. + device_name (str): Device name. + + Returns: + Union[requests.Response, requests.HTTPError]: API client return message. + """ + response = self._delete(uri=f"groups/{group_name}/devices/{device_name}") + if response.ok: + self.job.log_info(message=f"Deleting {device_name} from {group_name} group on {self.host}.") + return response.json() + self.job.log_warning(message=f"Failed to delete {device_name} from {group_name} group on {self.host}.") + return response.raise_for_status() diff --git a/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py index fd0892ef8..1bf2fe55a 100644 --- a/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py @@ -43,7 +43,7 @@ def _is_rfc1123_compliant(self, device_name: str) -> bool: return True - def _ansible_vars(device_obj: Device) -> dict: + def _ansible_vars(self, device_obj: Device) -> dict: """Create device variables to load into Automation Gateway.""" if device_obj.platform and device_obj.platform.network_driver_mappings.get("ansible"): ansible_network_os = {"ansible_network_os": device_obj.platform.network_driver_mappings.get("ansible")} diff --git a/nautobot_ssot/integrations/itential/diffsync/models/itential.py b/nautobot_ssot/integrations/itential/diffsync/models/itential.py index 590f73fb8..e6ec8d286 100644 --- a/nautobot_ssot/integrations/itential/diffsync/models/itential.py +++ b/nautobot_ssot/integrations/itential/diffsync/models/itential.py @@ -29,5 +29,5 @@ def delete(self): def update(self, attrs): """Update device in Automation Gateway.""" - self.diffsync.api_cient.update_device(device_name=self.name, variables=attrs.get("variables")) + self.diffsync.api_client.update_device(device_name=self.name, variables=attrs.get("variables")) return super().update(attrs) diff --git a/nautobot_ssot/tests/itential/fixtures/devices.py b/nautobot_ssot/tests/itential/fixtures/devices.py index a0a9e7419..371057919 100644 --- a/nautobot_ssot/tests/itential/fixtures/devices.py +++ b/nautobot_ssot/tests/itential/fixtures/devices.py @@ -27,6 +27,7 @@ "network_driver": "cisco_ios", "role": "Router", "status": "Active", + "config_context": {"ansible_port": 22, "ansible_connection": "ansible.netcommon.network_cli"}, }, { "name": "rtr2.example.net", @@ -40,6 +41,18 @@ "role": "Router", "status": "Active", }, + { + "name": "rtr10.example.net", + "location": "North America", + "manufacturer": "Cisco", + "model": "Cisco 2901", + "interface": "gigabitEthernet0/1", + "ip_address": "192.0.2.10", + "platform": "Cisco IOS", + "network_driver": "cisco_ios", + "role": "Router", + "status": "Active", + }, ] @@ -63,6 +76,7 @@ def update_or_create_device_object( model: str, interface: str, ip_address: str, + config_context: dict = {}, ): """Create or update device fixtures.""" status = Status.objects.get(name="Active") @@ -87,3 
+101,6 @@ def update_or_create_device_object( if ip_address: ip_address, _ = IPAddress.objects.update_or_create(host=ip_address, mask_length=32, status=status) ip_address.primary_ip4_for.add(device) + + device.local_config_context = config_context + device.save() diff --git a/nautobot_ssot/tests/itential/fixtures/gateways.py b/nautobot_ssot/tests/itential/fixtures/gateways.py index 9203d71a2..ac740b322 100644 --- a/nautobot_ssot/tests/itential/fixtures/gateways.py +++ b/nautobot_ssot/tests/itential/fixtures/gateways.py @@ -73,8 +73,17 @@ "ansible_port": 22, }, }, - "create_device": {"name": "rtr10.example.net", "variables": {"ansible_host": "192.0.2.10"}}, - "update_device": {"name": "rtr10.example.net", "variables": {"ansible_host": "192.0.2.10"}}, + "create_device": { + "name": "rtr10.example.net", + "variables": {"ansible_host": "192.0.2.10", "ansible_network_os": "cisco.ios.ios"}, + }, + "update_device": { + "name": "rtr10.example.net", + "variables": { + "ansible_host": "192.0.2.10", + "ansible_network_os": "cisco.ios.ios", + }, + }, "delete_device": {"code": 200, "status": 200, "message": "deleted"}, "get_groups": { "meta": { @@ -92,7 +101,7 @@ ], }, "get_group": { - "name": "rtr1.example.net", + "name": "all", "variables": {"ansible_user": "testUser", "ansible_password": "testPass"}, "devices": ["rtr1.example.net"], "childGroups": [], @@ -110,6 +119,8 @@ "childGroups": [], }, "delete_group": {"code": 200, "status": 200, "message": "deleted"}, + "add_device_to_group": ["rtr1.example.net"], + "delete_device_from_group": {"code": 200, "status": 200, "message": "deleted"}, }, }, } diff --git a/nautobot_ssot/tests/itential/fixtures/urls.py b/nautobot_ssot/tests/itential/fixtures/urls.py index c06c19391..7397c3812 100644 --- a/nautobot_ssot/tests/itential/fixtures/urls.py +++ b/nautobot_ssot/tests/itential/fixtures/urls.py @@ -69,4 +69,14 @@ "url": f"{gateways.responses['iag1'].get('hostname')}/api/v2.0/groups/test-group", "json": gateways.responses["iag1"]["responses"].get("delete_group"), }, + { + "method": "POST", + "url": f"{gateways.responses['iag1'].get('hostname')}/api/v2.0/groups/all/devices", + "json": gateways.responses["iag1"]["responses"].get("add_device_to_group"), + }, + { + "method": "DELETE", + "url": f"{gateways.responses['iag1'].get('hostname')}/api/v2.0/groups/all/devices/rtr1.example.net", + "json": gateways.responses["iag1"]["responses"].get("delete_device_from_group"), + }, ] diff --git a/nautobot_ssot/tests/itential/test_diffsync.py b/nautobot_ssot/tests/itential/test_diffsync.py index 9549da903..c168df287 100644 --- a/nautobot_ssot/tests/itential/test_diffsync.py +++ b/nautobot_ssot/tests/itential/test_diffsync.py @@ -10,3 +10,8 @@ def test_inventory_diff(self): """Test diff exists.""" diff = self.nautobot_adapter.diff_to(self.itential_adapter) self.assertTrue(diff.has_diffs()) + + def test_inventory_sync(self): + """Test successful sync.""" + diff = self.nautobot_adapter.sync_to(self.itential_adapter) + self.assertFalse(diff.has_diffs()) From 9f83d39ffdbeb6b402b9d34059f7804b03890d7a Mon Sep 17 00:00:00 2001 From: jtdub Date: Tue, 16 Apr 2024 10:07:21 -0500 Subject: [PATCH 192/229] update diffsync test --- nautobot_ssot/tests/itential/fixtures/base.py | 4 +++- .../tests/itential/fixtures/devices.py | 16 ++++++++++++++-- .../tests/itential/fixtures/gateways.py | 18 +++++++++++++++--- nautobot_ssot/tests/itential/fixtures/urls.py | 4 ++-- nautobot_ssot/tests/itential/test_diffsync.py | 3 ++- 5 files changed, 36 insertions(+), 9 deletions(-) diff --git 
a/nautobot_ssot/tests/itential/fixtures/base.py b/nautobot_ssot/tests/itential/fixtures/base.py index f44a90f73..3faf145bb 100644 --- a/nautobot_ssot/tests/itential/fixtures/base.py +++ b/nautobot_ssot/tests/itential/fixtures/base.py @@ -3,6 +3,8 @@ import os import requests_mock +# from unittest import TestCase + from nautobot.apps.testing import TestCase from nautobot_ssot.integrations.itential.models import AutomationGatewayModel @@ -17,7 +19,6 @@ class ItentialSSoTBaseTestCase(TestCase): def setUp(self): """Setup test cases.""" self.job = JobLogger() - self.job.log_info(message="test") self.requests_mock = requests_mock.Mocker() self.requests_mock.start() @@ -59,6 +60,7 @@ def setUp(self): model=device.get("model"), interface=device.get("interface"), ip_address=device.get("ip_address"), + config_context=device.get("config_context"), ) self.gateway = AutomationGatewayModel.objects.first() diff --git a/nautobot_ssot/tests/itential/fixtures/devices.py b/nautobot_ssot/tests/itential/fixtures/devices.py index 371057919..6caf71c5b 100644 --- a/nautobot_ssot/tests/itential/fixtures/devices.py +++ b/nautobot_ssot/tests/itential/fixtures/devices.py @@ -53,6 +53,18 @@ "role": "Router", "status": "Active", }, + { + "name": "rtr11.example.net", + "location": "North America", + "manufacturer": "Cisco", + "model": "NCS 5501", + "interface": "managementEthernet0/0/0/1", + "ip_address": "192.0.2.11", + "platform": "Cisco IOS-XR", + "network_driver": "cisco_xr", + "role": "Router", + "status": "Active", + }, ] @@ -79,7 +91,7 @@ def update_or_create_device_object( config_context: dict = {}, ): """Create or update device fixtures.""" - status = Status.objects.get(name="Active") + status = Status.objects.get(name=status) namespace = Namespace.objects.get(name="Global") ip_prefix, _ = Prefix.objects.update_or_create(prefix="192.0.2.0/24", namespace=namespace, status=status) device_content_type = ContentType.objects.get_for_model(Device) @@ -102,5 +114,5 @@ def update_or_create_device_object( ip_address, _ = IPAddress.objects.update_or_create(host=ip_address, mask_length=32, status=status) ip_address.primary_ip4_for.add(device) - device.local_config_context = config_context + device.local_config_context_data = config_context device.save() diff --git a/nautobot_ssot/tests/itential/fixtures/gateways.py b/nautobot_ssot/tests/itential/fixtures/gateways.py index ac740b322..f11abecf8 100644 --- a/nautobot_ssot/tests/itential/fixtures/gateways.py +++ b/nautobot_ssot/tests/itential/fixtures/gateways.py @@ -61,7 +61,19 @@ "ansible_connection": "ansible.netcommon.network_cli", "ansible_port": 22, }, - } + }, + { + "name": "rtr10.example.net", + "variables": { + "ansible_host": "192.0.2.1", + }, + }, + { + "name": "rtr12.example.net", + "variables": { + "ansible_host": "192.0.2.12", + }, + }, ], }, "get_device": { @@ -74,8 +86,8 @@ }, }, "create_device": { - "name": "rtr10.example.net", - "variables": {"ansible_host": "192.0.2.10", "ansible_network_os": "cisco.ios.ios"}, + "name": "rtr11.example.net", + "variables": {"ansible_host": "192.0.2.11", "ansible_network_os": "cisco.iosxr.iosxr"}, }, "update_device": { "name": "rtr10.example.net", diff --git a/nautobot_ssot/tests/itential/fixtures/urls.py b/nautobot_ssot/tests/itential/fixtures/urls.py index 7397c3812..208e64741 100644 --- a/nautobot_ssot/tests/itential/fixtures/urls.py +++ b/nautobot_ssot/tests/itential/fixtures/urls.py @@ -41,7 +41,7 @@ }, { "method": "DELETE", - "url": f"{gateways.responses['iag1'].get('hostname')}/api/v2.0/devices/rtr10.example.net", + 
"url": f"{gateways.responses['iag1'].get('hostname')}/api/v2.0/devices/rtr12.example.net", "json": gateways.responses["iag1"]["responses"].get("delete_device"), }, { @@ -76,7 +76,7 @@ }, { "method": "DELETE", - "url": f"{gateways.responses['iag1'].get('hostname')}/api/v2.0/groups/all/devices/rtr1.example.net", + "url": f"{gateways.responses['iag1'].get('hostname')}/api/v2.0/groups/all/devices/rtr12.example.net", "json": gateways.responses["iag1"]["responses"].get("delete_device_from_group"), }, ] diff --git a/nautobot_ssot/tests/itential/test_diffsync.py b/nautobot_ssot/tests/itential/test_diffsync.py index c168df287..20c3e4ea9 100644 --- a/nautobot_ssot/tests/itential/test_diffsync.py +++ b/nautobot_ssot/tests/itential/test_diffsync.py @@ -13,5 +13,6 @@ def test_inventory_diff(self): def test_inventory_sync(self): """Test successful sync.""" - diff = self.nautobot_adapter.sync_to(self.itential_adapter) + self.nautobot_adapter.sync_to(self.itential_adapter) + diff = self.nautobot_adapter.diff_to(self.itential_adapter) self.assertFalse(diff.has_diffs()) From 48724b4b03730a29fa40866a3e7d54979e67a540 Mon Sep 17 00:00:00 2001 From: jtdub Date: Wed, 17 Apr 2024 09:21:43 -0500 Subject: [PATCH 193/229] initial job add --- nautobot_ssot/integrations/itential/jobs.py | 141 ++++++++++++++++++++ nautobot_ssot/tests/itential/test_jobs.py | 1 + 2 files changed, 142 insertions(+) create mode 100644 nautobot_ssot/integrations/itential/jobs.py create mode 100644 nautobot_ssot/tests/itential/test_jobs.py diff --git a/nautobot_ssot/integrations/itential/jobs.py b/nautobot_ssot/integrations/itential/jobs.py new file mode 100644 index 000000000..3d47c852b --- /dev/null +++ b/nautobot_ssot/integrations/itential/jobs.py @@ -0,0 +1,141 @@ +"""Itential SSoT Jobs.""" + +from datetime import datetime + +from django.forms import HiddenInput + +from nautobot.dcim.models import Location +from nautobot.extras.jobs import BooleanVar, ObjectVar, Job + +from nautobot_ssot.jobs.base import DataTarget +from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices + +from nautobot_ssot.integrations.itential.models import AutomationGatewayModel +from nautobot_ssot.integrations.itential.clients import AutomationGatewayClient +from nautobot_ssot.integrations.itential.diffsync.adapters.itential import ItentialAnsibleDeviceAdapter +from nautobot_ssot.integrations.itential.diffsync.adapters.nautobot import NautobotAnsibleDeviceAdapter + + +name = "SSoT - Itential" # pylint: disable=invalid-name + + +class ItentialAutomationGatewayDataTarget(DataTarget, Job): + """Job syncing Nautobot to Itential Automation Gateway.""" + + dryrun = BooleanVar(default=False, widget=HiddenInput(), required=True) + location = ObjectVar(model=Location, description="Choose a location to sync to.", required=True) + + class Meta: + """Meta class definition.""" + + name = "Nautobot ⟹ Itential Automation Gateway" + data_target = "Itential Automation Gateway" + # data_source_icon = static("nautobot_ssot_itential/itential.png") + description = "Sync data from Nautobot into Itential Automation Gateway." 
+ field_order = ("location", "dry_run") + + def gateways(self, location): + """Fetch Automation Gateways to sync.""" + self.logger.info(f"Loading gateays for {location}.") + gateways = AutomationGatewayClient.objects.filter(enabled=True, location=location) + return gateways + + def load_source_adapter(self, location: Location): + """Load Nautobot adapter.""" + self.source_adapter = NautobotAnsibleDeviceAdapter(job=self, sync=self.sync, location=location) + self.logger.info("Loading data from Nautobot.") + self.source_adapter.load() + + def load_target_adapter(self, api_client: AutomationGatewayClient): + """Load Itential adapter.""" + self.target_adapter = ItentialAnsibleDeviceAdapter(job=self, sync=self.sync, api_client=api_client) + self.logger.info("Loading data from Itential.") + self.target_adapter.load() + + def sync_data(self, memory_profiling): + """Execute Nautobot ⟹ Itential Automation Gateway sync.""" + + def record_memory_trace(step: str): + """Helper function to record memory usage and reset tracemalloc stats.""" + memory_final, memory_peak = tracemalloc.get_traced_memory() + setattr(self.sync, f"{step}_memory_final", memory_final) + setattr(self.sync, f"{step}_memory_peak", memory_peak) + self.sync.save() + self.logger.info("Traced memory for %s (Final, Peak): %s bytes, %s bytes", step, memory_final, memory_peak) + tracemalloc.clear_traces() + + if not self.sync: + return + + if memory_profiling: + tracemalloc.start() + + start_time = datetime.now() + + self.logger.info("Loading current data from source adapter...") + self.load_source_adapter(location=self.location) + load_source_adapter_time = datetime.now() + self.sync.source_load_time = load_source_adapter_time - start_time + self.sync.save() + self.logger.info("Source Load Time from %s: %s", self.source_adapter, self.sync.source_load_time) + + if memory_profiling: + record_memory_trace("source_load") + + for device in self.gateways(location=self.location): + with AutomationGatewayClient( + host=device.gateway.remote_url, + username=device.gateway.secrets_group.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_REST, + secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME, + ), + password=device.gateway.secrets_group.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_REST, + secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD, + ), + job=self, + verify_ssl=device.gateway.verify_ssl, + ) as api_client: + self.logger.info("Loading current data from target adapter...") + self.load_target_adapter(api_client=api_client, job=self) + load_target_adapter_time = datetime.now() + self.sync.target_load_time = load_target_adapter_time - load_source_adapter_time + self.sync.save() + self.logger.info("Target Load Time from %s: %s", self.target_adapter, self.sync.target_load_time) + + if memory_profiling: + record_memory_trace("target_load") + + self.logger.info("Calculating diffs...") + self.calculate_diff() + calculate_diff_time = datetime.now() + self.sync.diff_time = calculate_diff_time - load_target_adapter_time + self.sync.save() + self.logger.info("Diff Calculation Time: %s", self.sync.diff_time) + + if memory_profiling: + record_memory_trace("diff") + + if self.dryrun: + self.logger.info("As `dryrun` is set, skipping the actual data sync.") + else: + self.logger.info("Syncing from %s to %s...", self.source_adapter, self.target_adapter) + self.execute_sync() + execute_sync_time = datetime.now() + self.sync.sync_time = execute_sync_time - calculate_diff_time + self.sync.save() + 
self.logger.info("Sync complete") + self.logger.info("Sync Time: %s", self.sync.sync_time) + + if memory_profiling: + record_memory_trace("sync") + + def run(self, dryrun, memory_profiling, location, *args, **kwargs): + """Execute sync.""" + self.dryrun = dryrun + self.memory_profiling = memory_profiling + self.location = location + super().__init__(dryrun=self.dryrun, memory_profiling=self.memory_profiling, *args, **kwargs) + + +jobs = [ItentialAutomationGatewayDataTarget] diff --git a/nautobot_ssot/tests/itential/test_jobs.py b/nautobot_ssot/tests/itential/test_jobs.py new file mode 100644 index 000000000..49492a073 --- /dev/null +++ b/nautobot_ssot/tests/itential/test_jobs.py @@ -0,0 +1 @@ +"""Itential SSoT Jobs Test Cases.""" From fed8d373e82914187ef74a44454f64d53e5a1d1b Mon Sep 17 00:00:00 2001 From: jtdub Date: Wed, 17 Apr 2024 10:13:10 -0500 Subject: [PATCH 194/229] update job --- .../itential/diffsync/adapters/itential.py | 3 ++- .../itential/diffsync/adapters/nautobot.py | 3 ++- nautobot_ssot/integrations/itential/jobs.py | 27 +++++++++++-------- 3 files changed, 20 insertions(+), 13 deletions(-) diff --git a/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py b/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py index e28c33922..386054ab2 100644 --- a/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py +++ b/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py @@ -11,11 +11,12 @@ class ItentialAnsibleDeviceAdapter(DiffSync): device = ItentialAnsibleDeviceModel top_level = ["device"] - def __init__(self, api_client: AutomationGatewayClient, job: object, *args, **kwargs): + def __init__(self, api_client: AutomationGatewayClient, job: object, sync: object, *args, **kwargs): """Initialize Diffsync Adapter.""" super().__init__(*args, **kwargs) self.api_client = api_client self.job = job + self.sync = sync def load(self): """Load Adapter.""" diff --git a/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py index 1bf2fe55a..c187e01d9 100644 --- a/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py @@ -16,11 +16,12 @@ class NautobotAnsibleDeviceAdapter(DiffSync): device = NautobotAnsibleDeviceModel top_level = ["device"] - def __init__(self, job: object, location: str, location_descendants: bool, *args, **kwargs): + def __init__(self, job: object, sync: object, location: str, location_descendants: bool, *args, **kwargs): """Initialize Nautobot Itential Ansible Device Diffsync adapter.""" super().__init__(*args, **kwargs) self.job = job self.location = location + self.sync = sync self.location_descendants = location_descendants def _is_rfc1123_compliant(self, device_name: str) -> bool: diff --git a/nautobot_ssot/integrations/itential/jobs.py b/nautobot_ssot/integrations/itential/jobs.py index 3d47c852b..5c8c5f8f2 100644 --- a/nautobot_ssot/integrations/itential/jobs.py +++ b/nautobot_ssot/integrations/itential/jobs.py @@ -24,6 +24,7 @@ class ItentialAutomationGatewayDataTarget(DataTarget, Job): dryrun = BooleanVar(default=False, widget=HiddenInput(), required=True) location = ObjectVar(model=Location, description="Choose a location to sync to.", required=True) + location_descendants = BooleanVar(default=True, required=True) class Meta: """Meta class definition.""" @@ -32,17 +33,20 @@ class Meta: data_target = "Itential Automation Gateway" # data_source_icon = 
static("nautobot_ssot_itential/itential.png") description = "Sync data from Nautobot into Itential Automation Gateway." - field_order = ("location", "dry_run") + field_order = ("location", "location_descendants", "dry_run") - def gateways(self, location): + @property + def gateways(self): """Fetch Automation Gateways to sync.""" - self.logger.info(f"Loading gateays for {location}.") - gateways = AutomationGatewayClient.objects.filter(enabled=True, location=location) + self.logger.info(f"Loading gateays for {self.location}.") + gateways = AutomationGatewayClient.objects.filter(enabled=True, location=self.location) return gateways - def load_source_adapter(self, location: Location): + def load_source_adapter(self, location: Location, location_descendants: bool): """Load Nautobot adapter.""" - self.source_adapter = NautobotAnsibleDeviceAdapter(job=self, sync=self.sync, location=location) + self.source_adapter = NautobotAnsibleDeviceAdapter( + job=self, sync=self.sync, location=location, location_descendants=location_descendants + ) self.logger.info("Loading data from Nautobot.") self.source_adapter.load() @@ -73,7 +77,7 @@ def record_memory_trace(step: str): start_time = datetime.now() self.logger.info("Loading current data from source adapter...") - self.load_source_adapter(location=self.location) + self.load_source_adapter(location=self.location, location_descendants=self.location_descendants) load_source_adapter_time = datetime.now() self.sync.source_load_time = load_source_adapter_time - start_time self.sync.save() @@ -82,7 +86,7 @@ def record_memory_trace(step: str): if memory_profiling: record_memory_trace("source_load") - for device in self.gateways(location=self.location): + for device in self.gateways: with AutomationGatewayClient( host=device.gateway.remote_url, username=device.gateway.secrets_group.get_secret_value( @@ -96,8 +100,8 @@ def record_memory_trace(step: str): job=self, verify_ssl=device.gateway.verify_ssl, ) as api_client: - self.logger.info("Loading current data from target adapter...") - self.load_target_adapter(api_client=api_client, job=self) + self.logger.info("Loading current data from target adapter.") + self.load_target_adapter(api_client=api_client) load_target_adapter_time = datetime.now() self.sync.target_load_time = load_target_adapter_time - load_source_adapter_time self.sync.save() @@ -130,11 +134,12 @@ def record_memory_trace(step: str): if memory_profiling: record_memory_trace("sync") - def run(self, dryrun, memory_profiling, location, *args, **kwargs): + def run(self, dryrun, memory_profiling, location, location_descendants, *args, **kwargs): """Execute sync.""" self.dryrun = dryrun self.memory_profiling = memory_profiling self.location = location + self.location_descendants = location_descendants super().__init__(dryrun=self.dryrun, memory_profiling=self.memory_profiling, *args, **kwargs) From d657a150ef22be63612ff02ed6d4d1a4180dc463 Mon Sep 17 00:00:00 2001 From: jtdub Date: Wed, 17 Apr 2024 10:48:58 -0500 Subject: [PATCH 195/229] update job logger --- .../integrations/itential/clients.py | 60 +++++++++---------- .../itential/diffsync/adapters/itential.py | 2 +- .../itential/diffsync/adapters/nautobot.py | 12 ++-- nautobot_ssot/integrations/itential/jobs.py | 2 + nautobot_ssot/tests/itential/fixtures/base.py | 4 +- .../tests/itential/fixtures/logger.py | 17 ++++-- 6 files changed, 53 insertions(+), 44 deletions(-) diff --git a/nautobot_ssot/integrations/itential/clients.py b/nautobot_ssot/integrations/itential/clients.py index 2ceca9368..380da8e41 
100644 --- a/nautobot_ssot/integrations/itential/clients.py +++ b/nautobot_ssot/integrations/itential/clients.py @@ -88,38 +88,38 @@ def login(self) -> Union[requests.Response, requests.HTTPError]: response = self._post(uri="login", json_data={"username": self.username, "password": self.password}) if response.ok: - self.job.log_info(message=f"Logging into {self.host}.") + self.job.logger.info(message=f"Logging into {self.host}.") self.cookie = {"AutomationGatewayToken": response.json()["token"]} self.session.headers.update(self.cookie) return response.json() - self.job.log_warning(message=f"Failed to login to {self.host}.") + self.job.logger.warning(message=f"Failed to login to {self.host}.") return response.raise_for_status() def logout(self) -> Union[requests.Response, requests.HTTPError]: """Logout of Automation Gateway.""" response = self._post(uri="logout") if response.ok: - self.job.log_info(message=f"Logging out of {self.host}.") + self.job.logger.info(message=f"Logging out of {self.host}.") return response.json() - self.job.log_warning(message=f"Failed logging out of {self.host}.") + self.job.logger.warning(message=f"Failed logging out of {self.host}.") return response.raise_for_status() def status(self) -> Union[requests.Response, requests.HTTPError]: """Get Automation Gateway status.""" response = self._get(uri="poll") if response.ok: - self.job.log_info(message=f"{self.host} polling is successful.") + self.job.logger.info(message=f"{self.host} polling is successful.") return response.json() - self.job.log_warning(message=f"Failed to poll {self.host}.") + self.job.logger.warning(message=f"Failed to poll {self.host}.") return response.raise_for_status() def get_devices(self) -> Union[requests.Response, requests.HTTPError]: """Get a devices.""" response = self._get(uri="devices") if response.ok: - self.job.log_info(message=f"Pulling devices from {self.host}.") + self.job.logger.info(message=f"Pulling devices from {self.host}.") return response.json() - self.job.log_warning(message=f"Failed pulling devices from {self.host}.") + self.job.logger.warning(message=f"Failed pulling devices from {self.host}.") return response.raise_for_status() def get_device(self, device_name: str) -> Union[requests.Response, requests.HTTPError]: @@ -133,9 +133,9 @@ def get_device(self, device_name: str) -> Union[requests.Response, requests.HTTP """ response = self._get(uri=f"devices/{device_name}") if response.ok: - self.job.log_info(message=f"Pulling {device_name} from {self.host}.") + self.job.logger.info(message=f"Pulling {device_name} from {self.host}.") return response.json() - self.job.log_warning(message=f"Failed pulling {device_name} from {self.host}.") + self.job.logger.warning(message=f"Failed pulling {device_name} from {self.host}.") return response.raise_for_status() def create_device( @@ -153,9 +153,9 @@ def create_device( payload = {"name": device_name, "variables": variables} response = self._post(uri="devices", json_data=payload) if response.ok: - self.job.log_info(message=f"Creating {device_name} on {self.host}.") + self.job.logger.info(message=f"Creating {device_name} on {self.host}.") return response.json() - self.job.log_warning(message=f"Failed to create {device_name} on {self.host}.") + self.job.logger.warning(message=f"Failed to create {device_name} on {self.host}.") return response.raise_for_status() def update_device( @@ -172,9 +172,9 @@ def update_device( """ response = self._put(uri=f"devices/{device_name}", json_data=variables) if response.ok: - 
self.job.log_info(message=f"Updating {device_name} on {self.host}.") + self.job.logger.info(message=f"Updating {device_name} on {self.host}.") return response.json() - self.job.log_warning(message=f"Failed to update {device_name} on {self.host}.") + self.job.logger.warning(message=f"Failed to update {device_name} on {self.host}.") return response.raise_for_status() def delete_device(self, device_name: str) -> Union[requests.Response, requests.HTTPError]: @@ -188,18 +188,18 @@ def delete_device(self, device_name: str) -> Union[requests.Response, requests.H """ response = self._delete(uri=f"devices/{device_name}") if response.ok: - self.job.log_info(message=f"Deleting {device_name} on {self.host}.") + self.job.logger.info(message=f"Deleting {device_name} on {self.host}.") return response.json() - self.job.log_warning(message=f"Failed to delete {device_name} on {self.host}.") + self.job.logger.warning(message=f"Failed to delete {device_name} on {self.host}.") return response.raise_for_status() def get_groups(self) -> List[str]: """Get a groups.""" response = self._get(uri="groups") if response.ok: - self.job.log_info(message=f"Pulling groups from {self.host}.") + self.job.logger.info(message=f"Pulling groups from {self.host}.") return response.json() - self.job.log_warning(message=f"Failed pulling groups from {self.host}.") + self.job.logger.warning(message=f"Failed pulling groups from {self.host}.") return response.raise_for_status() def get_group(self, group_name: str) -> Union[requests.Response, requests.HTTPError]: @@ -213,9 +213,9 @@ def get_group(self, group_name: str) -> Union[requests.Response, requests.HTTPEr """ response = self._get(uri=f"groups/{group_name}") if response.ok: - self.job.log_info(message=f"Pulling {group_name} from {self.host}.") + self.job.logger.info(message=f"Pulling {group_name} from {self.host}.") return response.json() - self.job.log_warning(message=f"Failed pulling {group_name} from {self.host}.") + self.job.logger.warning(message=f"Failed pulling {group_name} from {self.host}.") return response.raise_for_status() def create_group(self, group_name: str, variables: Optional[dict]) -> Union[requests.Response, requests.HTTPError]: @@ -231,9 +231,9 @@ def create_group(self, group_name: str, variables: Optional[dict]) -> Union[requ payload = {"name": group_name, "variables": variables} response = self._post(uri="groups", json_data=payload) if response.ok: - self.job.log_info(message=f"Creating {group_name} on {self.host}.") + self.job.logger.info(message=f"Creating {group_name} on {self.host}.") return response.json() - self.job.log_warning(message=f"Failed to create {group_name} on {self.host}.") + self.job.logger.warning(message=f"Failed to create {group_name} on {self.host}.") return response.raise_for_status() def update_group(self, group_name: str, variables: Optional[dict]) -> Union[requests.Response, requests.HTTPError]: @@ -248,9 +248,9 @@ def update_group(self, group_name: str, variables: Optional[dict]) -> Union[requ """ response = self._put(uri=f"groups/{group_name}", json_data=variables) if response.ok: - self.job.log_info(message=f"Updating {group_name} on {self.host}.") + self.job.logger.info(message=f"Updating {group_name} on {self.host}.") return response.json() - self.job.log_warning(message=f"Failed to update {group_name} on {self.host}.") + self.job.logger.warning(message=f"Failed to update {group_name} on {self.host}.") return response.raise_for_status() def delete_group(self, group_name: str) -> Union[requests.Response, requests.HTTPError]: @@ 
-264,9 +264,9 @@ def delete_group(self, group_name: str) -> Union[requests.Response, requests.HTT """ response = self._delete(uri=f"groups/{group_name}") if response.ok: - self.job.log_info(message=f"Deleting {group_name} on {self.host}.") + self.job.logger.info(message=f"Deleting {group_name} on {self.host}.") return response.json() - self.job.log_warning(message=f"Failed to delete {group_name} on {self.host}.") + self.job.logger.warning(message=f"Failed to delete {group_name} on {self.host}.") return response.raise_for_status() def add_device_to_group(self, group_name: str, device_name: str) -> Union[requests.Response, requests.HTTPError]: @@ -282,9 +282,9 @@ def add_device_to_group(self, group_name: str, device_name: str) -> Union[reques device_name = [device_name] response = self._post(uri=f"groups/{group_name}/devices", json_data=device_name) if response.ok: - self.job.log_info(message=f"Adding {device_name} to {group_name} group on {self.host}.") + self.job.logger.info(message=f"Adding {device_name} to {group_name} group on {self.host}.") return response.json() - self.job.log_warning(message=f"Failed to add {device_name} to {group_name} group on {self.host}.") + self.job.logger.warning(message=f"Failed to add {device_name} to {group_name} group on {self.host}.") return response.raise_for_status() def delete_device_from_group( @@ -301,7 +301,7 @@ def delete_device_from_group( """ response = self._delete(uri=f"groups/{group_name}/devices/{device_name}") if response.ok: - self.job.log_info(message=f"Deleting {device_name} from {group_name} group on {self.host}.") + self.job.logger.info(message=f"Deleting {device_name} from {group_name} group on {self.host}.") return response.json() - self.job.log_warning(message=f"Failed to delete {device_name} from {group_name} group on {self.host}.") + self.job.logger.warning(message=f"Failed to delete {device_name} from {group_name} group on {self.host}.") return response.raise_for_status() diff --git a/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py b/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py index 386054ab2..eaf3da2ed 100644 --- a/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py +++ b/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py @@ -20,7 +20,7 @@ def __init__(self, api_client: AutomationGatewayClient, job: object, sync: objec def load(self): """Load Adapter.""" - self.job.log_info(message=f"Loading Itential devices from {self.api_client.host} into Diffsync adapter.") + self.job.logger.info(message=f"Loading Itential devices from {self.api_client.host} into Diffsync adapter.") devices = self.api_client.get_devices().get("data") for iag_device in devices: diff --git a/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py index c187e01d9..b12f40e72 100644 --- a/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py @@ -28,7 +28,7 @@ def _is_rfc1123_compliant(self, device_name: str) -> bool: """Check to see if a device name is RFC 1123 compliant.""" # Check for invalid characters (anything other than alphanumerics, hyphens, and periods) if not re.search("[a-zA-Z0-9][a-zA-Z0-9-.]{0,62}$", device_name): - self.job.log_warning(message=f"{device_name} has invalid characters.") + self.job.logger.warning(message=f"{device_name} has invalid characters.") return False # RFC 1123 allows hostnames to start with a digit @@
-39,7 +39,7 @@ def _is_rfc1123_compliant(self, device_name: str) -> bool: for label in labels: if not re.match(label_pattern, label) or label.endswith("-"): - self.job.log_warning(message=f"{device_name} has an invalid hostname pattern.") + self.job.logger.warning(message=f"{device_name} has an invalid hostname pattern.") return False return True @@ -58,11 +58,11 @@ def _ansible_vars(self, device_obj: Device) -> dict: def load(self): """Load Nautobot Diffsync adapter.""" - self.job.log_info(message="Loading locations from Nautobot.") + self.job.logger.info(message="Loading locations from Nautobot.") location = Location.objects.get(name=self.location) locations = location.descendants(include_self=True) if self.location_descendants else location - self.job.log_info(message="Loading devices from Nautobot.") + self.job.logger.info(message="Loading devices from Nautobot.") devices = Device.objects.filter(location__in=locations).exclude(primary_ip4=None) for nb_device in devices: @@ -76,7 +76,7 @@ def load(self): raise Exception(f"{nb_device.name} is not RFC 1123 compliant.") except Exception as exc: stacktrace = traceback.format_exc() - self.job.log_warning(message=f"{nb_device.name} was not added to inventory due to an error.") - self.job.log_warning( + self.job.logger.warning(message=f"{nb_device.name} was not added to inventory due to an error.") + self.job.logger.warning( message=f"An exception occurred: " f"`{type(exc).__name__}: {exc}`\n```\n{stacktrace}\n```" ) diff --git a/nautobot_ssot/integrations/itential/jobs.py b/nautobot_ssot/integrations/itential/jobs.py index 5c8c5f8f2..02730ba8f 100644 --- a/nautobot_ssot/integrations/itential/jobs.py +++ b/nautobot_ssot/integrations/itential/jobs.py @@ -1,5 +1,7 @@ """Itential SSoT Jobs.""" +import tracemalloc + from datetime import datetime from django.forms import HiddenInput diff --git a/nautobot_ssot/tests/itential/fixtures/base.py b/nautobot_ssot/tests/itential/fixtures/base.py index 3faf145bb..ef05931dc 100644 --- a/nautobot_ssot/tests/itential/fixtures/base.py +++ b/nautobot_ssot/tests/itential/fixtures/base.py @@ -65,9 +65,9 @@ def setUp(self): self.gateway = AutomationGatewayModel.objects.first() self.client = clients.api_client(self.gateway) - self.itential_adapter = itential.ItentialAnsibleDeviceAdapter(api_client=self.client, job=self.job) + self.itential_adapter = itential.ItentialAnsibleDeviceAdapter(api_client=self.client, job=self.job, sync=None) self.nautobot_adapter = nautobot.NautobotAnsibleDeviceAdapter( - job=self.job, location="North America", location_descendants=True + job=self.job, location="North America", location_descendants=True, sync=None ) self.itential_adapter.load() diff --git a/nautobot_ssot/tests/itential/fixtures/logger.py b/nautobot_ssot/tests/itential/fixtures/logger.py index 4718d2e55..50673054d 100644 --- a/nautobot_ssot/tests/itential/fixtures/logger.py +++ b/nautobot_ssot/tests/itential/fixtures/logger.py @@ -3,17 +3,24 @@ import logging -class JobLogger: - """Job Logger.""" +class Logger: + """Logger.""" - def log_info(self, message: str): + def info(self, message: str): """Info logging.""" logging.info(message) - def log_warning(self, message: str): + def warning(self, message: str): """Warning logging.""" logging.warning(message) - def log_failure(self, message: str): + def failure(self, message: str): """Failure logging.""" logging.error(message) + + +class JobLogger: + """Job Logger.""" + + def __init__(self): + self.logger = Logger()
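The `_is_rfc1123_compliant` helper touched in the hunks above can be exercised on its own. The following standalone sketch copies the character-class regex and label loop from the diff; `label_pattern` and the dot-split of `labels` are assumptions, since their definitions fall outside the hunk context:

    import re

    # Assumed RFC 1123 label shape; the adapter's real label_pattern is defined
    # outside the diff context shown above.
    label_pattern = r"^[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?$"

    def is_rfc1123_compliant(device_name: str) -> bool:
        """Standalone mirror of the adapter's hostname check, for illustration."""
        # Reject anything other than alphanumerics, hyphens, and periods.
        if not re.search("[a-zA-Z0-9][a-zA-Z0-9-.]{0,62}$", device_name):
            return False
        # RFC 1123 allows hostnames to start with a digit; validate each label.
        for label in device_name.split("."):
            if not re.match(label_pattern, label) or label.endswith("-"):
                return False
        return True

    assert is_rfc1123_compliant("rtr10.example.net")
    assert not is_rfc1123_compliant("rtr10_.example.net")

From 4f1c4c7e3570cbaac6867f563686bc5a9001db1c Mon Sep 17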
00:00:00 2001 From: jtdub Date: Wed, 17 Apr 2024 15:50:33 -0500 Subject: [PATCH 196/229] itential job --- .../integrations/itential/clients.py | 60 ++++----- .../itential/diffsync/adapters/itential.py | 2 +- .../itential/diffsync/adapters/nautobot.py | 16 ++- nautobot_ssot/integrations/itential/jobs.py | 120 ++++++++---------- nautobot_ssot/tests/itential/fixtures/base.py | 67 ++++++++++ .../tests/itential/fixtures/logger.py | 12 +- nautobot_ssot/tests/itential/fixtures/urls.py | 5 + nautobot_ssot/tests/itential/test_jobs.py | 24 ++++ 8 files changed, 195 insertions(+), 111 deletions(-) diff --git a/nautobot_ssot/integrations/itential/clients.py b/nautobot_ssot/integrations/itential/clients.py index 380da8e41..884841025 100644 --- a/nautobot_ssot/integrations/itential/clients.py +++ b/nautobot_ssot/integrations/itential/clients.py @@ -88,38 +88,38 @@ def login(self) -> Union[requests.Response, requests.HTTPError]: response = self._post(uri="login", json_data={"username": self.username, "password": self.password}) if response.ok: - self.job.logger.info(message=f"Logging into {self.host}.") + self.job.logger.info(f"Logging into {self.host}.") self.cookie = {"AutomationGatewayToken": response.json()["token"]} self.session.headers.update(self.cookie) return response.json() - self.job.logger.warning(message=f"Failed to login to {self.host}.") + self.job.logger.warning(f"Failed to login to {self.host}.") return response.raise_for_status() def logout(self) -> Union[requests.Response, requests.HTTPError]: """Logout of Automation Gateway.""" response = self._post(uri="logout") if response.ok: - self.job.logger.info(message=f"Logging out of {self.host}.") + self.job.logger.info(f"Logging out of {self.host}.") return response.json() - self.job.logger.warning(message=f"Failed logging out of {self.host}.") + self.job.logger.warning(f"Failed logging out of {self.host}.") return response.raise_for_status() def status(self) -> Union[requests.Response, requests.HTTPError]: """Get Automation Gateway status.""" response = self._get(uri="poll") if response.ok: - self.job.logger.info(message=f"{self.host} polling is successful.") + self.job.logger.info(f"{self.host} polling is successful.") return response.json() - self.job.logger.warning(message=f"Failed to poll {self.host}.") + self.job.logger.warning(f"Failed to poll {self.host}.") return response.raise_for_status() def get_devices(self) -> Union[requests.Response, requests.HTTPError]: """Get a devices.""" response = self._get(uri="devices") if response.ok: - self.job.logger.info(message=f"Pulling devices from {self.host}.") + self.job.logger.info(f"Pulling devices from {self.host}.") return response.json() - self.job.logger.warning(message=f"Failed pulling devices from {self.host}.") + self.job.logger.warning(f"Failed pulling devices from {self.host}.") return response.raise_for_status() def get_device(self, device_name: str) -> Union[requests.Response, requests.HTTPError]: @@ -133,9 +133,9 @@ def get_device(self, device_name: str) -> Union[requests.Response, requests.HTTP """ response = self._get(uri=f"devices/{device_name}") if response.ok: - self.job.logger.info(message=f"Pulling {device_name} from {self.host}.") + self.job.logger.info(f"Pulling {device_name} from {self.host}.") return response.json() - self.job.logger.warning(message=f"Failed pulling {device_name} from {self.host}.") + self.job.logger.warning(f"Failed pulling {device_name} from {self.host}.") return response.raise_for_status() def create_device( @@ -153,9 +153,9 @@ def create_device( 
payload = {"name": device_name, "variables": variables} response = self._post(uri="devices", json_data=payload) if response.ok: - self.job.logger.info(message=f"Creating {device_name} on {self.host}.") + self.job.logger.info(f"Creating {device_name} on {self.host}.") return response.json() - self.job.logger.warning(message=f"Failed to create {device_name} on {self.host}.") + self.job.logger.warning(f"Failed to create {device_name} on {self.host}.") return response.raise_for_status() def update_device( @@ -172,9 +172,9 @@ def update_device( """ response = self._put(uri=f"devices/{device_name}", json_data=variables) if response.ok: - self.job.logger.info(message=f"Updating {device_name} on {self.host}.") + self.job.logger.info(f"Updating {device_name} on {self.host}.") return response.json() - self.job.logger.warning(message=f"Failed to update {device_name} on {self.host}.") + self.job.logger.warning(f"Failed to update {device_name} on {self.host}.") return response.raise_for_status() def delete_device(self, device_name: str) -> Union[requests.Response, requests.HTTPError]: @@ -188,18 +188,18 @@ def delete_device(self, device_name: str) -> Union[requests.Response, requests.H """ response = self._delete(uri=f"devices/{device_name}") if response.ok: - self.job.logger.info(message=f"Deleting {device_name} on {self.host}.") + self.job.logger.info(f"Deleting {device_name} on {self.host}.") return response.json() - self.job.logger.warning(message=f"Failed to delete {device_name} on {self.host}.") + self.job.logger.warning(f"Failed to delete {device_name} on {self.host}.") return response.raise_for_status() def get_groups(self) -> List[str]: """Get a groups.""" response = self._get(uri="groups") if response.ok: - self.job.logger.info(message=f"Pulling groups from {self.host}.") + self.job.logger.info(f"Pulling groups from {self.host}.") return response.json() - self.job.logger.warning(message=f"Failed pulling groups from {self.host}.") + self.job.logger.warning(f"Failed pulling groups from {self.host}.") return response.raise_for_status() def get_group(self, group_name: str) -> Union[requests.Response, requests.HTTPError]: @@ -213,9 +213,9 @@ def get_group(self, group_name: str) -> Union[requests.Response, requests.HTTPEr """ response = self._get(uri=f"groups/{group_name}") if response.ok: - self.job.logger.info(message=f"Pulling {group_name} from {self.host}.") + self.job.logger.info(f"Pulling {group_name} from {self.host}.") return response.json() - self.job.logger.warning(message=f"Failed pulling {group_name} from {self.host}.") + self.job.logger.warning(f"Failed pulling {group_name} from {self.host}.") return response.raise_for_status() def create_group(self, group_name: str, variables: Optional[dict]) -> Union[requests.Response, requests.HTTPError]: @@ -231,9 +231,9 @@ def create_group(self, group_name: str, variables: Optional[dict]) -> Union[requ payload = {"name": group_name, "variables": variables} response = self._post(uri="groups", json_data=payload) if response.ok: - self.job.logger.info(message=f"Creating {group_name} on {self.host}.") + self.job.logger.info(f"Creating {group_name} on {self.host}.") return response.json() - self.job.logger.warning(message=f"Failed to create {group_name} on {self.host}.") + self.job.logger.warning(f"Failed to create {group_name} on {self.host}.") return response.raise_for_status() def update_group(self, group_name: str, variables: Optional[dict]) -> Union[requests.Response, requests.HTTPError]: @@ -248,9 +248,9 @@ def update_group(self, group_name: str, 
variables: Optional[dict]) -> Union[requ """ response = self._put(uri=f"groups/{group_name}", json_data=variables) if response.ok: - self.job.logger.info(message=f"Updating {group_name} on {self.host}.") + self.job.logger.info(f"Updating {group_name} on {self.host}.") return response.json() - self.job.logger.warning(message=f"Failed to update {group_name} on {self.host}.") + self.job.logger.warning(f"Failed to update {group_name} on {self.host}.") return response.raise_for_status() def delete_group(self, group_name: str) -> Union[requests.Response, requests.HTTPError]: @@ -264,9 +264,9 @@ def delete_group(self, group_name: str) -> Union[requests.Response, requests.HTT """ response = self._delete(uri=f"groups/{group_name}") if response.ok: - self.job.logger.info(message=f"Deleting {group_name} on {self.host}.") + self.job.logger.info(f"Deleting {group_name} on {self.host}.") return response.json() - self.job.logger.warning(message=f"Failed to delete {group_name} on {self.host}.") + self.job.logger.warning(f"Failed to delete {group_name} on {self.host}.") return response.raise_for_status() def add_device_to_group(self, group_name: str, device_name: str) -> Union[requests.Response, requests.HTTPError]: @@ -282,9 +282,9 @@ def add_device_to_group(self, group_name: str, device_name: str) -> Union[reques device_name = [device_name] response = self._post(uri=f"groups/{group_name}/devices", json_data=device_name) if response.ok: - self.job.logger.info(message=f"Adding {device_name} to {group_name} group on {self.host}.") + self.job.logger.info(f"Adding {device_name} to {group_name} group on {self.host}.") return response.json() - self.job.logger.warning(message=f"Failed to add {device_name} to {group_name} group on {self.host}.") + self.job.logger.warning(f"Failed to add {device_name} to {group_name} group on {self.host}.") return response.raise_for_status() def delete_device_from_group( @@ -301,7 +301,7 @@ def delete_device_from_group( """ response = self._delete(uri=f"groups/{group_name}/devices/{device_name}") if response.ok: - self.job.logger.info(message=f"Deleting {device_name} from {group_name} group on {self.host}.") + self.job.logger.info(f"Deleting {device_name} from {group_name} group on {self.host}.") return response.json() - self.job.logger.warning(message=f"Failed to delete {device_name} from {group_name} group on {self.host}.") + self.job.logger.warning(f"Failed to delete {device_name} from {group_name} group on {self.host}.") return response.raise_for_status() diff --git a/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py b/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py index eaf3da2ed..a8b15e9e0 100644 --- a/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py +++ b/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py @@ -20,7 +20,7 @@ def __init__(self, api_client: AutomationGatewayClient, job: object, sync: objec def load(self): """Load Adapter.""" - self.job.logger.info(message=f"Loading Itential devices from {self.api_client.host} into Diffsync adapter.") + self.job.logger.info(f"Loading Itential devices from {self.api_client.host} into Diffsync adapter.") devices = self.api_client.get_devices().get("data") for iag_device in devices: diff --git a/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py index b12f40e72..6fce76630 100644 --- a/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py +++ 
b/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py @@ -7,6 +7,7 @@ from nautobot_ssot.integrations.itential.diffsync.models.nautobot import NautobotAnsibleDeviceModel +from nautobot.extras.models import Status from nautobot.dcim.models import Device, Location @@ -28,7 +29,7 @@ def _is_rfc1123_compliant(self, device_name: str) -> bool: """Check to see if a device name is RFC 1123 compliant.""" # Check for invalid characters (anything other than alphanumerics, hyphens, and periods) if not re.search("[a-zA-Z0-9][a-zA-Z0-9-.]{0,62}$", device_name): - self.job.logger.warning(message=f"{device_name} has invalid characters.") + self.job.logger.warning(f"{device_name} has invalid characters.") return False # RFC 1123 allows hostnames to start with a digit @@ -39,7 +40,7 @@ def _is_rfc1123_compliant(self, device_name: str) -> bool: for label in labels: if not re.match(label_pattern, label) or label.endswith("-"): - self.job.logger.warning(message=f"{device_name} has an invalid hostname pattern.") + self.job.logger.warning(f"{device_name} has an invalid hostname pattern.") return False return True @@ -58,12 +59,13 @@ def _ansible_vars(self, device_obj: Device) -> dict: def load(self): """Load Nautobot Diffsync adapter.""" - self.job.logger.info(message="Loading locations from Nautobot.") + self.job.logger.info("Loading locations from Nautobot.") location = Location.objects.get(name=self.location) locations = location.descendants(include_self=True) if self.location_descendants else location + status = Status.objects.get(name="Active") - self.job.logger.info(message="Loading devices from Nautobot.") - devices = Device.objects.filter(location__in=locations).exclude(primary_ip4=None) + self.job.logger.info("Loading devices from Nautobot.") + devices = Device.objects.filter(location__in=locations, status=status).exclude(primary_ip4=None) for nb_device in devices: try: @@ -76,7 +78,7 @@ def load(self): raise Exception(f"{nb_device.name} is not RFC 1123 compliant.") except Exception as exc: stacktrace = traceback.format_exc() - self.job.logger.warning(message=f"{nb_device.name} was not added to inventory due to an error.") + self.job.logger.warning(f"{nb_device.name} was not added to inventory due to an error.") self.job.logger.warning( - message=f"An exception occurred: " f"`{type(exc).__name__}: {exc}`\n```\n{stacktrace}\n```" + f"An exception occurred: " f"`{type(exc).__name__}: {exc}`\n```\n{stacktrace}\n```" ) diff --git a/nautobot_ssot/integrations/itential/jobs.py b/nautobot_ssot/integrations/itential/jobs.py index 02730ba8f..65790e795 100644 --- a/nautobot_ssot/integrations/itential/jobs.py +++ b/nautobot_ssot/integrations/itential/jobs.py @@ -4,10 +4,7 @@ from datetime import datetime -from django.forms import HiddenInput - -from nautobot.dcim.models import Location -from nautobot.extras.jobs import BooleanVar, ObjectVar, Job +from nautobot.extras.jobs import ObjectVar from nautobot_ssot.jobs.base import DataTarget from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices @@ -21,12 +18,10 @@ name = "SSoT - Itential" # pylint: disable=invalid-name -class ItentialAutomationGatewayDataTarget(DataTarget, Job): +class ItentialAutomationGatewayDataTarget(DataTarget): """Job syncing Nautobot to Itential Automation Gateway.""" - dryrun = BooleanVar(default=False, widget=HiddenInput(), required=True) - location = ObjectVar(model=Location, description="Choose a location to sync to.", required=True) - location_descendants = BooleanVar(default=True, 
required=True) + gateway = ObjectVar(model=AutomationGatewayModel, description="Choose a gateway to sync to.", required=True) class Meta: """Meta class definition.""" @@ -35,19 +30,12 @@ class Meta: data_target = "Itential Automation Gateway" # data_source_icon = static("nautobot_ssot_itential/itential.png") description = "Sync data from Nautobot into Itential Automation Gateway." - field_order = ("location", "location_descendants", "dry_run") - - @property - def gateways(self): - """Fetch Automation Gateways to sync.""" - self.logger.info(f"Loading gateays for {self.location}.") - gateways = AutomationGatewayClient.objects.filter(enabled=True, location=self.location) - return gateways + has_sensitive_variables = False - def load_source_adapter(self, location: Location, location_descendants: bool): + def load_source_adapter(self): """Load Nautobot adapter.""" self.source_adapter = NautobotAnsibleDeviceAdapter( - job=self, sync=self.sync, location=location, location_descendants=location_descendants + job=self, sync=self.sync, location=self.location, location_descendants=self.location_descendants ) self.logger.info("Loading data from Nautobot.") self.source_adapter.load() @@ -78,8 +66,7 @@ def record_memory_trace(step: str): start_time = datetime.now() - self.logger.info("Loading current data from source adapter...") - self.load_source_adapter(location=self.location, location_descendants=self.location_descendants) + self.load_source_adapter() load_source_adapter_time = datetime.now() self.sync.source_load_time = load_source_adapter_time - start_time self.sync.save() @@ -88,61 +75,60 @@ def record_memory_trace(step: str): if memory_profiling: record_memory_trace("source_load") - for device in self.gateways: - with AutomationGatewayClient( - host=device.gateway.remote_url, - username=device.gateway.secrets_group.get_secret_value( - access_type=SecretsGroupAccessTypeChoices.TYPE_REST, - secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME, - ), - password=device.gateway.secrets_group.get_secret_value( - access_type=SecretsGroupAccessTypeChoices.TYPE_REST, - secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD, - ), - job=self, - verify_ssl=device.gateway.verify_ssl, - ) as api_client: - self.logger.info("Loading current data from target adapter.") - self.load_target_adapter(api_client=api_client) - load_target_adapter_time = datetime.now() - self.sync.target_load_time = load_target_adapter_time - load_source_adapter_time - self.sync.save() - self.logger.info("Target Load Time from %s: %s", self.target_adapter, self.sync.target_load_time) + with AutomationGatewayClient( + host=self.gateway.gateway.remote_url, + username=self.gateway.gateway.secrets_group.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_REST, + secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME, + ), + password=self.gateway.gateway.secrets_group.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_REST, + secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD, + ), + job=self, + verify_ssl=self.gateway.gateway.verify_ssl, + ) as api_client: + self.load_target_adapter(api_client=api_client) + load_target_adapter_time = datetime.now() + self.sync.target_load_time = load_target_adapter_time - load_source_adapter_time + self.sync.save() + self.logger.info("Target Load Time from %s: %s", self.target_adapter, self.sync.target_load_time) - if memory_profiling: - record_memory_trace("target_load") + if memory_profiling: + record_memory_trace("target_load") - self.logger.info("Calculating 
diffs...") - self.calculate_diff() - calculate_diff_time = datetime.now() - self.sync.diff_time = calculate_diff_time - load_target_adapter_time + self.logger.info("Calculating diffs...") + self.calculate_diff() + calculate_diff_time = datetime.now() + self.sync.diff_time = calculate_diff_time - load_target_adapter_time + self.sync.save() + self.logger.info("Diff Calculation Time: %s", self.sync.diff_time) + + if memory_profiling: + record_memory_trace("diff") + + if self.dryrun: + self.logger.info("As `dryrun` is set, skipping the actual data sync.") + else: + self.logger.info("Syncing from %s to %s...", self.source_adapter, self.target_adapter) + self.execute_sync() + execute_sync_time = datetime.now() + self.sync.sync_time = execute_sync_time - calculate_diff_time self.sync.save() - self.logger.info("Diff Calculation Time: %s", self.sync.diff_time) + self.logger.info("Sync complete") + self.logger.info("Sync Time: %s", self.sync.sync_time) if memory_profiling: - record_memory_trace("diff") - - if self.dryrun: - self.logger.info("As `dryrun` is set, skipping the actual data sync.") - else: - self.logger.info("Syncing from %s to %s...", self.source_adapter, self.target_adapter) - self.execute_sync() - execute_sync_time = datetime.now() - self.sync.sync_time = execute_sync_time - calculate_diff_time - self.sync.save() - self.logger.info("Sync complete") - self.logger.info("Sync Time: %s", self.sync.sync_time) - - if memory_profiling: - record_memory_trace("sync") - - def run(self, dryrun, memory_profiling, location, location_descendants, *args, **kwargs): + record_memory_trace("sync") + + def run(self, dryrun, memory_profiling, gateway, *args, **kwargs): # pylint: disable=arguments-differ """Execute sync.""" + self.gateway = gateway + self.location = self.gateway.location + self.location_descendants = self.gateway.location_descendants self.dryrun = dryrun self.memory_profiling = memory_profiling - self.location = location - self.location_descendants = location_descendants - super().__init__(dryrun=self.dryrun, memory_profiling=self.memory_profiling, *args, **kwargs) + super().run(dryrun=self.dryrun, memory_profiling=self.memory_profiling, *args, **kwargs) jobs = [ItentialAutomationGatewayDataTarget] diff --git a/nautobot_ssot/tests/itential/fixtures/base.py b/nautobot_ssot/tests/itential/fixtures/base.py index ef05931dc..f90a5523b 100644 --- a/nautobot_ssot/tests/itential/fixtures/base.py +++ b/nautobot_ssot/tests/itential/fixtures/base.py @@ -6,6 +6,8 @@ # from unittest import TestCase from nautobot.apps.testing import TestCase +from nautobot.apps.testing import TransactionTestCase + from nautobot_ssot.integrations.itential.models import AutomationGatewayModel from nautobot_ssot.integrations.itential.diffsync.adapters import itential, nautobot @@ -76,3 +78,68 @@ def setUp(self): def tearDown(self): """Teardown test cases.""" self.requests_mock.stop() + + +class ItentialSSoTBaseTransactionTestCase(TransactionTestCase): + """Itential Automation Gateway Client Test Cases.""" + + def setUp(self): + """Setup test cases.""" + self.job = JobLogger() + self.requests_mock = requests_mock.Mocker() + self.requests_mock.start() + + for device in gateways.gateways: + os.environ[device.get("username_env")] = "testUser" + os.environ[device.get("password_env")] = "testPass" + + gateways.update_or_create_automation_gateways( + name=device.get("name"), + description=device.get("description"), + location=device.get("location"), + region=device.get("region"), + gateway=device.get("gateway"), + 
enabled=device.get("enabled"), + username_env=device.get("username_env"), + password_env=device.get("password_env"), + secret_group=device.get("secret_group"), + ) + + for url_item in urls.data: + self.requests_mock.register_uri( + method=url_item.get("method"), + url=url_item.get("url"), + json=url_item.get("json"), + status_code=url_item.get("status_code", 200), + headers=url_item.get("headers", {}), + cookies=url_item.get("cookies", {}), + ) + + for device in devices.data: + devices.update_or_create_device_object( + status=device.get("status"), + role=device.get("role"), + name=device.get("name"), + location=device.get("location"), + manufacturer=device.get("manufacturer"), + platform=device.get("platform"), + network_driver=device.get("network_driver"), + model=device.get("model"), + interface=device.get("interface"), + ip_address=device.get("ip_address"), + config_context=device.get("config_context"), + ) + + self.gateway = AutomationGatewayModel.objects.first() + self.client = clients.api_client(self.gateway) + self.itential_adapter = itential.ItentialAnsibleDeviceAdapter(api_client=self.client, job=self.job, sync=None) + self.nautobot_adapter = nautobot.NautobotAnsibleDeviceAdapter( + job=self.job, location="North America", location_descendants=True, sync=None + ) + + self.itential_adapter.load() + self.nautobot_adapter.load() + + def tearDown(self): + """Teardown test cases.""" + self.requests_mock.stop() diff --git a/nautobot_ssot/tests/itential/fixtures/logger.py b/nautobot_ssot/tests/itential/fixtures/logger.py index 50673054d..d4046ed97 100644 --- a/nautobot_ssot/tests/itential/fixtures/logger.py +++ b/nautobot_ssot/tests/itential/fixtures/logger.py @@ -6,17 +6,17 @@ class Logger: """Logger.""" - def info(self, message: str): + def info(self, msg: str): """Info logging.""" - logging.info(message) + logging.info(msg) - def warning(self, message: str): + def warning(self, msg: str): """Warning logging.""" - logging.warning(message) + logging.warning(msg) - def failure(self, message: str): + def failure(self, msg: str): """Failure logging.""" - logging.error(message) + logging.error(msg) class JobLogger: diff --git a/nautobot_ssot/tests/itential/fixtures/urls.py b/nautobot_ssot/tests/itential/fixtures/urls.py index 208e64741..968d51a1b 100644 --- a/nautobot_ssot/tests/itential/fixtures/urls.py +++ b/nautobot_ssot/tests/itential/fixtures/urls.py @@ -39,6 +39,11 @@ "url": f"{gateways.responses['iag1'].get('hostname')}/api/v2.0/devices/rtr10.example.net", "json": gateways.responses["iag1"]["responses"].get("update_device"), }, + { + "method": "DELETE", + "url": f"{gateways.responses['iag1'].get('hostname')}/api/v2.0/devices/rtr10.example.net", + "json": gateways.responses["iag1"]["responses"].get("delete_device"), + }, { "method": "DELETE", "url": f"{gateways.responses['iag1'].get('hostname')}/api/v2.0/devices/rtr12.example.net", diff --git a/nautobot_ssot/tests/itential/test_jobs.py b/nautobot_ssot/tests/itential/test_jobs.py index 49492a073..23235c002 100644 --- a/nautobot_ssot/tests/itential/test_jobs.py +++ b/nautobot_ssot/tests/itential/test_jobs.py @@ -1 +1,25 @@ """Itential SSoT Jobs Test Cases.""" + +from nautobot.extras.models import Job, JobLogEntry +from nautobot.apps.testing import run_job_for_testing + +from nautobot_ssot.tests.itential.fixtures import base + + +class ItentialSSoTJobsTestCase(base.ItentialSSoTBaseTransactionTestCase): + """Itential SSoT Jobs Test Cases.""" + + databases = ("default", "job_logs") + + def test_successful_job(self): + """Test successful 
job.""" + self.job = Job.objects.get( + job_class_name="ItentialAutomationGatewayDataTarget", + module_name="nautobot_ssot.integrations.itential.jobs", + ) + job_result = run_job_for_testing(self.job, dryrun=False, memory_profiling=False, gateway=self.gateway.pk) + log_entries = JobLogEntry.objects.filter(job_result=job_result) + self.assertGreater(log_entries.count(), 1) + log_entries = [log_entry.message for log_entry in log_entries] + summary_output = "{'create': 1, 'update': 1, 'delete': 1, 'no-change': 0, 'skip': 0}" + self.assertIn(summary_output, log_entries) From 69d05eaee69c22aeb9dbdd616c5c3fee9622a283 Mon Sep 17 00:00:00 2001 From: jtdub Date: Wed, 17 Apr 2024 16:20:14 -0500 Subject: [PATCH 197/229] add itential job --- nautobot_ssot/integrations/itential/jobs.py | 63 ++++++++++--------- .../tests/itential/fixtures/devices.py | 2 +- nautobot_ssot/tests/itential/test_diffsync.py | 4 +- nautobot_ssot/tests/itential/test_jobs.py | 5 +- 4 files changed, 40 insertions(+), 34 deletions(-) diff --git a/nautobot_ssot/integrations/itential/jobs.py b/nautobot_ssot/integrations/itential/jobs.py index 65790e795..418bdb66e 100644 --- a/nautobot_ssot/integrations/itential/jobs.py +++ b/nautobot_ssot/integrations/itential/jobs.py @@ -75,7 +75,7 @@ def record_memory_trace(step: str): if memory_profiling: record_memory_trace("source_load") - with AutomationGatewayClient( + api_client = AutomationGatewayClient( host=self.gateway.gateway.remote_url, username=self.gateway.gateway.secrets_group.get_secret_value( access_type=SecretsGroupAccessTypeChoices.TYPE_REST, @@ -87,39 +87,44 @@ def record_memory_trace(step: str): ), job=self, verify_ssl=self.gateway.gateway.verify_ssl, - ) as api_client: - self.load_target_adapter(api_client=api_client) - load_target_adapter_time = datetime.now() - self.sync.target_load_time = load_target_adapter_time - load_source_adapter_time - self.sync.save() - self.logger.info("Target Load Time from %s: %s", self.target_adapter, self.sync.target_load_time) + ) - if memory_profiling: - record_memory_trace("target_load") + api_client.login() - self.logger.info("Calculating diffs...") - self.calculate_diff() - calculate_diff_time = datetime.now() - self.sync.diff_time = calculate_diff_time - load_target_adapter_time + self.load_target_adapter(api_client=api_client) + load_target_adapter_time = datetime.now() + self.sync.target_load_time = load_target_adapter_time - load_source_adapter_time + self.sync.save() + self.logger.info("Target Load Time from %s: %s", self.target_adapter, self.sync.target_load_time) + + if memory_profiling: + record_memory_trace("target_load") + + self.logger.info("Calculating diffs...") + self.calculate_diff() + calculate_diff_time = datetime.now() + self.sync.diff_time = calculate_diff_time - load_target_adapter_time + self.sync.save() + self.logger.info("Diff Calculation Time: %s", self.sync.diff_time) + + if memory_profiling: + record_memory_trace("diff") + + if self.dryrun: + self.logger.info("As `dryrun` is set, skipping the actual data sync.") + else: + self.logger.info("Syncing from %s to %s...", self.source_adapter, self.target_adapter) + self.execute_sync() + execute_sync_time = datetime.now() + self.sync.sync_time = execute_sync_time - calculate_diff_time self.sync.save() - self.logger.info("Diff Calculation Time: %s", self.sync.diff_time) + self.logger.info("Sync complete") + self.logger.info("Sync Time: %s", self.sync.sync_time) if memory_profiling: - record_memory_trace("diff") - - if self.dryrun: - self.logger.info("As `dryrun` is set, 
skipping the actual data sync.") - else: - self.logger.info("Syncing from %s to %s...", self.source_adapter, self.target_adapter) - self.execute_sync() - execute_sync_time = datetime.now() - self.sync.sync_time = execute_sync_time - calculate_diff_time - self.sync.save() - self.logger.info("Sync complete") - self.logger.info("Sync Time: %s", self.sync.sync_time) - - if memory_profiling: - record_memory_trace("sync") + record_memory_trace("sync") + + api_client.logout() def run(self, dryrun, memory_profiling, gateway, *args, **kwargs): # pylint: disable=arguments-differ """Execute sync.""" diff --git a/nautobot_ssot/tests/itential/fixtures/devices.py b/nautobot_ssot/tests/itential/fixtures/devices.py index 6caf71c5b..f5123a8a0 100644 --- a/nautobot_ssot/tests/itential/fixtures/devices.py +++ b/nautobot_ssot/tests/itential/fixtures/devices.py @@ -92,7 +92,7 @@ def update_or_create_device_object( ): """Create or update device fixtures.""" status = Status.objects.get(name=status) - namespace = Namespace.objects.get(name="Global") + namespace, _ = Namespace.objects.get_or_create(name="Global") ip_prefix, _ = Prefix.objects.update_or_create(prefix="192.0.2.0/24", namespace=namespace, status=status) device_content_type = ContentType.objects.get_for_model(Device) role, role_changed = Role.objects.update_or_create(name=role) diff --git a/nautobot_ssot/tests/itential/test_diffsync.py b/nautobot_ssot/tests/itential/test_diffsync.py index 20c3e4ea9..4c16a867f 100644 --- a/nautobot_ssot/tests/itential/test_diffsync.py +++ b/nautobot_ssot/tests/itential/test_diffsync.py @@ -6,12 +6,12 @@ class DiffSyncTestCases(base.ItentialSSoTBaseTestCase): """DiffSync test cases.""" - def test_inventory_diff(self): + def test_diff__success(self): """Test diff exists.""" diff = self.nautobot_adapter.diff_to(self.itential_adapter) self.assertTrue(diff.has_diffs()) - def test_inventory_sync(self): + def test_sync__success(self): """Test successful sync.""" self.nautobot_adapter.sync_to(self.itential_adapter) diff = self.nautobot_adapter.diff_to(self.itential_adapter) diff --git a/nautobot_ssot/tests/itential/test_jobs.py b/nautobot_ssot/tests/itential/test_jobs.py index 23235c002..0f54159e0 100644 --- a/nautobot_ssot/tests/itential/test_jobs.py +++ b/nautobot_ssot/tests/itential/test_jobs.py @@ -11,7 +11,7 @@ class ItentialSSoTJobsTestCase(base.ItentialSSoTBaseTransactionTestCase): databases = ("default", "job_logs") - def test_successful_job(self): + def test_job__success(self): """Test successful job.""" self.job = Job.objects.get( job_class_name="ItentialAutomationGatewayDataTarget", @@ -21,5 +21,6 @@ def test_successful_job(self): log_entries = JobLogEntry.objects.filter(job_result=job_result) self.assertGreater(log_entries.count(), 1) log_entries = [log_entry.message for log_entry in log_entries] - summary_output = "{'create': 1, 'update': 1, 'delete': 1, 'no-change': 0, 'skip': 0}" + summary_output = "{'create': 1, 'update': 1, 'delete': 1, 'no-change': 1, 'skip': 0}" self.assertIn(summary_output, log_entries) + self.assertIn("Sync complete", log_entries) From d96190aaab74d050361d809896f2577176c774bb Mon Sep 17 00:00:00 2001 From: jtdub Date: Wed, 17 Apr 2024 18:56:02 -0500 Subject: [PATCH 198/229] rebasing - resolving conflicts --- nautobot_ssot/api/urls.py | 1 - nautobot_ssot/integrations/itential/api/urls.py | 4 ++-- .../itential/diffsync/adapters/nautobot.py | 10 ++++++---- nautobot_ssot/integrations/itential/jobs.py | 11 +++++++++-- nautobot_ssot/integrations/itential/views.py | 1 + 
nautobot_ssot/integrations/utils.py | 5 ++++- nautobot_ssot/tests/itential/fixtures/base.py | 7 +++++-- nautobot_ssot/tests/itential/test_jobs.py | 4 +++- 8 files changed, 30 insertions(+), 13 deletions(-) diff --git a/nautobot_ssot/api/urls.py b/nautobot_ssot/api/urls.py index 3fd743efa..a45c5cad9 100644 --- a/nautobot_ssot/api/urls.py +++ b/nautobot_ssot/api/urls.py @@ -3,7 +3,6 @@ from nautobot_ssot.integrations.utils import each_enabled_integration_module app_name = "ssot" # pylint: disable=invalid-name - urlpatterns = [] diff --git a/nautobot_ssot/integrations/itential/api/urls.py b/nautobot_ssot/integrations/itential/api/urls.py index 8c191038f..7859c6d5e 100644 --- a/nautobot_ssot/integrations/itential/api/urls.py +++ b/nautobot_ssot/integrations/itential/api/urls.py @@ -4,7 +4,7 @@ from nautobot_ssot.integrations.itential.api import views -router = OrderedDefaultRouter(view_name="Itential SSoT") -router.register("models", views.AutomationGatewayModelViewSet) +router = OrderedDefaultRouter() +router.register("itential/automation-gateway", views.AutomationGatewayModelViewSet) urlpatterns = router.urls diff --git a/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py index 6fce76630..c9ae20a78 100644 --- a/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py @@ -17,13 +17,16 @@ class NautobotAnsibleDeviceAdapter(DiffSync): device = NautobotAnsibleDeviceModel top_level = ["device"] - def __init__(self, job: object, sync: object, location: str, location_descendants: bool, *args, **kwargs): + def __init__( + self, job: object, sync: object, location: Location, location_descendants: bool, status: Status, *args, **kwargs + ): """Initialize Nautobot Itential Ansible Device Diffsync adapter.""" super().__init__(*args, **kwargs) self.job = job - self.location = location self.sync = sync + self.location = location self.location_descendants = location_descendants + self.status = status def _is_rfc1123_compliant(self, device_name: str) -> bool: """Check to see if a device name is RFC 1123 compliant.""" @@ -62,10 +65,9 @@ def load(self): self.job.logger.info("Loading locations from Nautobot.") location = Location.objects.get(name=self.location) locations = location.descendants(include_self=True) if self.location_descendants else location - status = Status.objects.get(name="Active") self.job.logger.info("Loading devices from Nautobot.") - devices = Device.objects.filter(location__in=locations, status=status).exclude(primary_ip4=None) + devices = Device.objects.filter(location__in=locations, status=self.status.pk).exclude(primary_ip4=None) for nb_device in devices: try: diff --git a/nautobot_ssot/integrations/itential/jobs.py b/nautobot_ssot/integrations/itential/jobs.py index 418bdb66e..7c1aa5303 100644 --- a/nautobot_ssot/integrations/itential/jobs.py +++ b/nautobot_ssot/integrations/itential/jobs.py @@ -4,6 +4,7 @@ from datetime import datetime +from nautobot.extras.models import Status from nautobot.extras.jobs import ObjectVar from nautobot_ssot.jobs.base import DataTarget @@ -22,6 +23,7 @@ class ItentialAutomationGatewayDataTarget(DataTarget): """Job syncing Nautobot to Itential Automation Gateway.""" gateway = ObjectVar(model=AutomationGatewayModel, description="Choose a gateway to sync to.", required=True) + status = ObjectVar(model=Status, description="Choose a device status to sync.", required=True) class Meta: """Meta class 
definition.""" @@ -35,7 +37,11 @@ class Meta: def load_source_adapter(self): """Load Nautobot adapter.""" self.source_adapter = NautobotAnsibleDeviceAdapter( - job=self, sync=self.sync, location=self.location, location_descendants=self.location_descendants + job=self, + sync=self.sync, + location=self.location, + location_descendants=self.location_descendants, + status=self.status, ) self.logger.info("Loading data from Nautobot.") self.source_adapter.load() @@ -126,9 +132,10 @@ def record_memory_trace(step: str): api_client.logout() - def run(self, dryrun, memory_profiling, gateway, *args, **kwargs): # pylint: disable=arguments-differ + def run(self, dryrun, memory_profiling, gateway, status, *args, **kwargs): # pylint: disable=arguments-differ """Execute sync.""" self.gateway = gateway + self.status = status self.location = self.gateway.location self.location_descendants = self.gateway.location_descendants self.dryrun = dryrun diff --git a/nautobot_ssot/integrations/itential/views.py b/nautobot_ssot/integrations/itential/views.py index cfaf63759..30d9774c3 100644 --- a/nautobot_ssot/integrations/itential/views.py +++ b/nautobot_ssot/integrations/itential/views.py @@ -15,3 +15,4 @@ class AutomationGatewayModelUIViewSet(views.NautobotUIViewSet): queryset = models.AutomationGatewayModel.objects.all() serializer_class = serializers.AutomationGatewayModelSerializer table_class = tables.AutomationGatewayModelTable + lookup_field = "pk" diff --git a/nautobot_ssot/integrations/utils.py b/nautobot_ssot/integrations/utils.py index 810e959ae..30103f755 100644 --- a/nautobot_ssot/integrations/utils.py +++ b/nautobot_ssot/integrations/utils.py @@ -23,7 +23,10 @@ def each_enabled_integration_module(module_name: str) -> Generator[ModuleType, N """For each enabled integration, import the module name.""" for name in each_enabled_integration(): try: - module = import_module(f"nautobot_ssot.integrations.{name}.{module_name}") + if api: + module = import_module(f"nautobot_ssot.integrations.{name}.api.{module_name}") + else: + module = import_module(f"nautobot_ssot.integrations.{name}.{module_name}") except ModuleNotFoundError: logger.debug("Integration %s does not have a %s module, skipping.", name, module_name) continue diff --git a/nautobot_ssot/tests/itential/fixtures/base.py b/nautobot_ssot/tests/itential/fixtures/base.py index f90a5523b..6b195275d 100644 --- a/nautobot_ssot/tests/itential/fixtures/base.py +++ b/nautobot_ssot/tests/itential/fixtures/base.py @@ -8,6 +8,7 @@ from nautobot.apps.testing import TestCase from nautobot.apps.testing import TransactionTestCase +from nautobot.extras.models import Status from nautobot_ssot.integrations.itential.models import AutomationGatewayModel from nautobot_ssot.integrations.itential.diffsync.adapters import itential, nautobot @@ -65,11 +66,12 @@ def setUp(self): config_context=device.get("config_context"), ) + self.status, _ = Status.objects.get_or_create(name="Active") self.gateway = AutomationGatewayModel.objects.first() self.client = clients.api_client(self.gateway) self.itential_adapter = itential.ItentialAnsibleDeviceAdapter(api_client=self.client, job=self.job, sync=None) self.nautobot_adapter = nautobot.NautobotAnsibleDeviceAdapter( - job=self.job, location="North America", location_descendants=True, sync=None + job=self.job, location="North America", location_descendants=True, status=self.status, sync=None ) self.itential_adapter.load() @@ -130,11 +132,12 @@ def setUp(self): config_context=device.get("config_context"), ) + self.status, _ = 
Status.objects.get_or_create(name="Active") self.gateway = AutomationGatewayModel.objects.first() self.client = clients.api_client(self.gateway) self.itential_adapter = itential.ItentialAnsibleDeviceAdapter(api_client=self.client, job=self.job, sync=None) self.nautobot_adapter = nautobot.NautobotAnsibleDeviceAdapter( - job=self.job, location="North America", location_descendants=True, sync=None + job=self.job, location="North America", location_descendants=True, status=self.status, sync=None ) self.itential_adapter.load() diff --git a/nautobot_ssot/tests/itential/test_jobs.py b/nautobot_ssot/tests/itential/test_jobs.py index 0f54159e0..1b6c25794 100644 --- a/nautobot_ssot/tests/itential/test_jobs.py +++ b/nautobot_ssot/tests/itential/test_jobs.py @@ -17,7 +17,9 @@ def test_job__success(self): job_class_name="ItentialAutomationGatewayDataTarget", module_name="nautobot_ssot.integrations.itential.jobs", ) - job_result = run_job_for_testing(self.job, dryrun=False, memory_profiling=False, gateway=self.gateway.pk) + job_result = run_job_for_testing( + self.job, dryrun=False, memory_profiling=False, gateway=self.gateway.pk, status=self.status.pk + ) log_entries = JobLogEntry.objects.filter(job_result=job_result) self.assertGreater(log_entries.count(), 1) log_entries = [log_entry.message for log_entry in log_entries] From 409d5e8c50e7d480164a5696c4ebd995a5a3c4be Mon Sep 17 00:00:00 2001 From: jtdub Date: Wed, 17 Apr 2024 18:58:23 -0500 Subject: [PATCH 199/229] resolve bandit error --- nautobot_ssot/integrations/itential/clients.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/integrations/itential/clients.py b/nautobot_ssot/integrations/itential/clients.py index 884841025..836bb7671 100644 --- a/nautobot_ssot/integrations/itential/clients.py +++ b/nautobot_ssot/integrations/itential/clients.py @@ -303,5 +303,5 @@ def delete_device_from_group( if response.ok: self.job.logger.info(f"Deleting {device_name} from {group_name} group on {self.host}.") return response.json() - self.job.logger.warning(f"Failed to delete {device_name} from {group_name} group on {self.host}.") + self.job.logger.warning(f"Failed to delete {device_name} from {group_name} group on {self.host}.") # nosec return response.raise_for_status() From df02df7f6aeb0d9ab393f3c093650ccdfaa6becc Mon Sep 17 00:00:00 2001 From: jtdub Date: Wed, 17 Apr 2024 19:22:06 -0500 Subject: [PATCH 200/229] add check to see if gateway is enabled. 
--- nautobot_ssot/integrations/itential/jobs.py | 4 ++++ nautobot_ssot/tests/itential/test_jobs.py | 18 ++++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/nautobot_ssot/integrations/itential/jobs.py b/nautobot_ssot/integrations/itential/jobs.py index 7c1aa5303..7513e0565 100644 --- a/nautobot_ssot/integrations/itential/jobs.py +++ b/nautobot_ssot/integrations/itential/jobs.py @@ -67,6 +67,10 @@ def record_memory_trace(step: str): if not self.sync: return + if not self.gateway.enabled: + self.logger.warning(f"{self.gateway.gateway.remote_url} is not enabled to sync inventory.") + return + if memory_profiling: tracemalloc.start() diff --git a/nautobot_ssot/tests/itential/test_jobs.py b/nautobot_ssot/tests/itential/test_jobs.py index 1b6c25794..a1cab9d5e 100644 --- a/nautobot_ssot/tests/itential/test_jobs.py +++ b/nautobot_ssot/tests/itential/test_jobs.py @@ -5,6 +5,8 @@ from nautobot_ssot.tests.itential.fixtures import base +from nautobot_ssot.integrations.itential.models import AutomationGatewayModel + class ItentialSSoTJobsTestCase(base.ItentialSSoTBaseTransactionTestCase): """Itential SSoT Jobs Test Cases.""" @@ -26,3 +28,19 @@ def test_job__success(self): summary_output = "{'create': 1, 'update': 1, 'delete': 1, 'no-change': 1, 'skip': 0}" self.assertIn(summary_output, log_entries) self.assertIn("Sync complete", log_entries) + + def test_job__disabled_gateway(self): + """Test job with disabled automation gateway.""" + gateway = AutomationGatewayModel.objects.get(name="IAG10") + self.job = Job.objects.get( + job_class_name="ItentialAutomationGatewayDataTarget", + module_name="nautobot_ssot.integrations.itential.jobs", + ) + job_result = run_job_for_testing( + self.job, dryrun=False, memory_profiling=False, gateway=gateway.pk, status=self.status.pk + ) + log_entries = JobLogEntry.objects.filter(job_result=job_result) + self.assertGreater(log_entries.count(), 1) + log_entries = [log_entry.message for log_entry in log_entries] + summary_output = f"{gateway.gateway.remote_url} is not enabled to sync inventory." 
+ self.assertIn(summary_output, log_entries) From 700ae698aca23fcf4b88bdb8d451cfd6fb91c0c6 Mon Sep 17 00:00:00 2001 From: jtdub Date: Thu, 18 Apr 2024 10:34:41 -0500 Subject: [PATCH 201/229] pylint --- .../integrations/itential/api/serializers.py | 2 +- .../integrations/itential/api/views.py | 2 +- .../integrations/itential/clients.py | 8 +-- .../itential/diffsync/adapters/itential.py | 2 + .../itential/diffsync/adapters/nautobot.py | 12 ++-- .../itential/diffsync/models/__init__.py | 15 +++++ .../itential/diffsync/models/itential.py | 13 +--- .../itential/diffsync/models/nautobot.py | 13 +--- nautobot_ssot/integrations/itential/forms.py | 2 +- nautobot_ssot/integrations/itential/jobs.py | 10 ++-- nautobot_ssot/tests/itential/fixtures/base.py | 12 ++++ .../tests/itential/fixtures/devices.py | 4 +- .../tests/itential/fixtures/gateways.py | 59 +++++++++++++++++-- nautobot_ssot/tests/itential/test_clients.py | 24 ++++---- 14 files changed, 123 insertions(+), 55 deletions(-) diff --git a/nautobot_ssot/integrations/itential/api/serializers.py b/nautobot_ssot/integrations/itential/api/serializers.py index a5d0b9e6c..c06fbaacc 100644 --- a/nautobot_ssot/integrations/itential/api/serializers.py +++ b/nautobot_ssot/integrations/itential/api/serializers.py @@ -7,7 +7,7 @@ from nautobot_ssot.integrations.itential import models -class AutomationGatewayModelSerializer(NautobotModelSerializer): +class AutomationGatewayModelSerializer(NautobotModelSerializer): # pylint: disable=too-many-ancestors """AutomationGatewayModel serializer.""" url = serializers.HyperlinkedIdentityField(view_name="plugins-api:nautobot_ssot-api:automationgatewaymodel-detail") diff --git a/nautobot_ssot/integrations/itential/api/views.py b/nautobot_ssot/integrations/itential/api/views.py index 34101dc7c..4575158e8 100644 --- a/nautobot_ssot/integrations/itential/api/views.py +++ b/nautobot_ssot/integrations/itential/api/views.py @@ -6,7 +6,7 @@ from nautobot_ssot.integrations.itential.api import serializers -class AutomationGatewayModelViewSet(NautobotModelViewSet): +class AutomationGatewayModelViewSet(NautobotModelViewSet): # pylint: disable=too-many-ancestors """AutomationGatewayModel API ViewSet.""" queryset = models.AutomationGatewayModel.objects.all() diff --git a/nautobot_ssot/integrations/itential/clients.py b/nautobot_ssot/integrations/itential/clients.py index 836bb7671..2c691e75f 100644 --- a/nautobot_ssot/integrations/itential/clients.py +++ b/nautobot_ssot/integrations/itential/clients.py @@ -1,14 +1,15 @@ """Itential SSoT API Clients.""" +from typing import List, Optional, Union + import requests from retry import retry -from typing import List, Optional, Union from nautobot_ssot.integrations.itential.constants import BACKOFF, DELAY, RETRIES -class AutomationGatewayClient: +class AutomationGatewayClient: # pylint: disable=too-many-instance-attributes """Itential Automation Gateway API Client.""" def __init__( @@ -19,7 +20,7 @@ def __init__( job: object, verify_ssl: Optional[bool] = True, api_version: Optional[str] = "v2.0", - ): + ): # pylint: disable=too-many-arguments """Initialize the API client. 
Args: @@ -42,7 +43,6 @@ def __init__( def __enter__(self): """Context manager setup.""" self.login() - return def __exit__(self, exc_type, exc_value, traceback): """Context manager teardown.""" diff --git a/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py b/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py index a8b15e9e0..4bac1d47e 100644 --- a/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py +++ b/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py @@ -1,6 +1,8 @@ """Itential SSoT adapters.""" + from diffsync import DiffSync + from nautobot_ssot.integrations.itential.diffsync.models.itential import ItentialAnsibleDeviceModel from nautobot_ssot.integrations.itential.clients import AutomationGatewayClient diff --git a/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py index c9ae20a78..5bae7b2fc 100644 --- a/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py @@ -5,11 +5,11 @@ from diffsync import DiffSync -from nautobot_ssot.integrations.itential.diffsync.models.nautobot import NautobotAnsibleDeviceModel - from nautobot.extras.models import Status from nautobot.dcim.models import Device, Location +from nautobot_ssot.integrations.itential.diffsync.models.nautobot import NautobotAnsibleDeviceModel + class NautobotAnsibleDeviceAdapter(DiffSync): """Nautobot => Itential Ansible Device Diffsync Adapter.""" @@ -17,7 +17,7 @@ class NautobotAnsibleDeviceAdapter(DiffSync): device = NautobotAnsibleDeviceModel top_level = ["device"] - def __init__( + def __init__( # pylint disable=too-many-arguments self, job: object, sync: object, location: Location, location_descendants: bool, status: Status, *args, **kwargs ): """Initialize Nautobot Itential Ansible Device Diffsync adapter.""" @@ -77,8 +77,10 @@ def load(self): self.add(_device) else: - raise Exception(f"{nb_device.name} is not RFC 1123 compliant.") - except Exception as exc: + raise Exception( + f"{nb_device.name} is not RFC 1123 compliant." 
+ ) # pylint: disable=broad-exception-raised + except Exception as exc: # pylint: disable=broad-exception-caught stacktrace = traceback.format_exc() self.job.logger.warning(f"{nb_device.name} was not added to inventory due to an error.") self.job.logger.warning( diff --git a/nautobot_ssot/integrations/itential/diffsync/models/__init__.py b/nautobot_ssot/integrations/itential/diffsync/models/__init__.py index 29a192ccc..c2bff3a79 100644 --- a/nautobot_ssot/integrations/itential/diffsync/models/__init__.py +++ b/nautobot_ssot/integrations/itential/diffsync/models/__init__.py @@ -1 +1,16 @@ """Itential SSoT diffsync models.""" + + +from typing import Optional +from diffsync import DiffSyncModel + + +class SharedAnsibleDeviceDiffsyncModel(DiffSyncModel): + """Itential Ansible Device DiffSyncModel.""" + + _modelname = "device" + _identifiers = ("name",) + _attributes = ("variables",) + + name: str + variables: Optional[dict] diff --git a/nautobot_ssot/integrations/itential/diffsync/models/itential.py b/nautobot_ssot/integrations/itential/diffsync/models/itential.py index e6ec8d286..300823f48 100644 --- a/nautobot_ssot/integrations/itential/diffsync/models/itential.py +++ b/nautobot_ssot/integrations/itential/diffsync/models/itential.py @@ -1,18 +1,11 @@ """Itential SSoT models.""" -from diffsync import DiffSyncModel -from typing import Optional +from nautobot_ssot.integrations.itential.diffsync.models import SharedAnsibleDeviceDiffsyncModel -class ItentialAnsibleDeviceModel(DiffSyncModel): - """Itential Ansible Device DiffSyncModel.""" - - _modelname = "device" - _identifiers = ("name",) - _attributes = ("variables",) - name: str - variables: Optional[dict] +class ItentialAnsibleDeviceModel(SharedAnsibleDeviceDiffsyncModel): + """Itential Ansible Device DiffSyncModel.""" @classmethod def create(cls, diffsync, ids, attrs): diff --git a/nautobot_ssot/integrations/itential/diffsync/models/nautobot.py b/nautobot_ssot/integrations/itential/diffsync/models/nautobot.py index 26ce76f26..e2119f259 100644 --- a/nautobot_ssot/integrations/itential/diffsync/models/nautobot.py +++ b/nautobot_ssot/integrations/itential/diffsync/models/nautobot.py @@ -1,15 +1,8 @@ """Itential SSoT Nautobot models.""" -from diffsync import DiffSyncModel -from typing import Optional +from nautobot_ssot.integrations.itential.diffsync.models import SharedAnsibleDeviceDiffsyncModel -class NautobotAnsibleDeviceModel(DiffSyncModel): - """Nautobot => Itential Ansible Device DiffSyncModel.""" - - _modelname = "device" - _identifiers = ("name",) - _attributes = ("variables",) - name: str - variables: Optional[dict] +class NautobotAnsibleDeviceModel(SharedAnsibleDeviceDiffsyncModel): + """Nautobot => Itential Ansible Device DiffSyncModel.""" diff --git a/nautobot_ssot/integrations/itential/forms.py b/nautobot_ssot/integrations/itential/forms.py index 46d326f7d..ce59ce0cb 100644 --- a/nautobot_ssot/integrations/itential/forms.py +++ b/nautobot_ssot/integrations/itential/forms.py @@ -33,7 +33,7 @@ class Meta: enabled = forms.BooleanField(required=False) -class AutomationGatewayModelForm(NautobotModelForm): +class AutomationGatewayModelForm(NautobotModelForm): # pylint: disable=too-many-ancestors """AutomationGatewayModel Form form.""" class Meta: diff --git a/nautobot_ssot/integrations/itential/jobs.py b/nautobot_ssot/integrations/itential/jobs.py index 7513e0565..abc95d068 100644 --- a/nautobot_ssot/integrations/itential/jobs.py +++ b/nautobot_ssot/integrations/itential/jobs.py @@ -7,8 +7,8 @@ from nautobot.extras.models import Status 
from nautobot.extras.jobs import ObjectVar -from nautobot_ssot.jobs.base import DataTarget from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices +from nautobot_ssot.jobs.base import DataTarget from nautobot_ssot.integrations.itential.models import AutomationGatewayModel from nautobot_ssot.integrations.itential.clients import AutomationGatewayClient @@ -19,7 +19,7 @@ name = "SSoT - Itential" # pylint: disable=invalid-name -class ItentialAutomationGatewayDataTarget(DataTarget): +class ItentialAutomationGatewayDataTarget(DataTarget): # pylint: disable=too-many-instance-attributes """Job syncing Nautobot to Itential Automation Gateway.""" gateway = ObjectVar(model=AutomationGatewayModel, description="Choose a gateway to sync to.", required=True) @@ -46,7 +46,7 @@ def load_source_adapter(self): self.logger.info("Loading data from Nautobot.") self.source_adapter.load() - def load_target_adapter(self, api_client: AutomationGatewayClient): + def load_target_adapter(self, api_client: AutomationGatewayClient): # pylint: disable=arguments-differ """Load Itential adapter.""" self.target_adapter = ItentialAnsibleDeviceAdapter(job=self, sync=self.sync, api_client=api_client) self.logger.info("Loading data from Itential.") @@ -140,8 +140,8 @@ def run(self, dryrun, memory_profiling, gateway, status, *args, **kwargs): # py """Execute sync.""" self.gateway = gateway self.status = status - self.location = self.gateway.location - self.location_descendants = self.gateway.location_descendants + self.location = self.gateway.location # pylint: disable=attribute-defined-outside-init + self.location_descendants = self.gateway.location_descendants # pylint: disable=attribute-defined-outside-init self.dryrun = dryrun self.memory_profiling = memory_profiling super().run(dryrun=self.dryrun, memory_profiling=self.memory_profiling, *args, **kwargs) diff --git a/nautobot_ssot/tests/itential/fixtures/base.py b/nautobot_ssot/tests/itential/fixtures/base.py index 6b195275d..8ff0f0b53 100644 --- a/nautobot_ssot/tests/itential/fixtures/base.py +++ b/nautobot_ssot/tests/itential/fixtures/base.py @@ -28,6 +28,9 @@ def setUp(self): for device in gateways.gateways: os.environ[device.get("username_env")] = "testUser" os.environ[device.get("password_env")] = "testPass" + os.environ[device.get("ansible_vault_env")] = "testAnsibleVaultKey" + os.environ[device.get("device_user_env")] = "testDeviceUser" + os.environ[device.get("device_pass_env")] = "testDevicePass" gateways.update_or_create_automation_gateways( name=device.get("name"), @@ -38,6 +41,9 @@ def setUp(self): enabled=device.get("enabled"), username_env=device.get("username_env"), password_env=device.get("password_env"), + ansible_vault_env=device.get("ansible_vault_env"), + device_user_env=device.get("device_user_env"), + device_pass_env=device.get("device_pass_env"), secret_group=device.get("secret_group"), ) @@ -94,6 +100,9 @@ def setUp(self): for device in gateways.gateways: os.environ[device.get("username_env")] = "testUser" os.environ[device.get("password_env")] = "testPass" + os.environ[device.get("ansible_vault_env")] = "testAnsibleVaultKey" + os.environ[device.get("device_user_env")] = "testDeviceUser" + os.environ[device.get("device_pass_env")] = "testDevicePass" gateways.update_or_create_automation_gateways( name=device.get("name"), @@ -104,6 +113,9 @@ def setUp(self): enabled=device.get("enabled"), username_env=device.get("username_env"), password_env=device.get("password_env"), + 
ansible_vault_env=device.get("ansible_vault_env"), + device_user_env=device.get("device_user_env"), + device_pass_env=device.get("device_pass_env"), secret_group=device.get("secret_group"), ) diff --git a/nautobot_ssot/tests/itential/fixtures/devices.py b/nautobot_ssot/tests/itential/fixtures/devices.py index f5123a8a0..f46bd8c9a 100644 --- a/nautobot_ssot/tests/itential/fixtures/devices.py +++ b/nautobot_ssot/tests/itential/fixtures/devices.py @@ -89,11 +89,11 @@ def update_or_create_device_object( interface: str, ip_address: str, config_context: dict = {}, -): +): # pylint: disable=dangerous-default-value,too-many-arguments,too-many-locals """Create or update device fixtures.""" status = Status.objects.get(name=status) namespace, _ = Namespace.objects.get_or_create(name="Global") - ip_prefix, _ = Prefix.objects.update_or_create(prefix="192.0.2.0/24", namespace=namespace, status=status) + Prefix.objects.update_or_create(prefix="192.0.2.0/24", namespace=namespace, status=status) device_content_type = ContentType.objects.get_for_model(Device) role, role_changed = Role.objects.update_or_create(name=role) add_content_type(model=role, content_type=device_content_type, changed=role_changed) diff --git a/nautobot_ssot/tests/itential/fixtures/gateways.py b/nautobot_ssot/tests/itential/fixtures/gateways.py index f11abecf8..9182cfc9f 100644 --- a/nautobot_ssot/tests/itential/fixtures/gateways.py +++ b/nautobot_ssot/tests/itential/fixtures/gateways.py @@ -15,6 +15,9 @@ "enabled": True, "username_env": "IAG1_USERNAME", "password_env": "IAG1_PASSWORD", + "ansible_vault_env": "IAG1_VAULT", + "device_user_env": "IAG1_DEVICE_USER", + "device_pass_env": "IAG1_DEVICE_PASS", "secret_group": "testGroup1", }, { @@ -25,6 +28,9 @@ "enabled": False, "username_env": "IAG1_USERNAME", "password_env": "IAG1_PASSWORD", + "ansible_vault_env": "IAG1_VAULT", + "device_user_env": "IAG1_DEVICE_USER", + "device_pass_env": "IAG1_DEVICE_PASS", "secret_group": "testGroup1", }, { @@ -35,6 +41,9 @@ "enabled": True, "username_env": "IAG2_USERNAME", "password_env": "IAG2_PASSWORD", + "ansible_vault_env": "IAG1_VAULT", + "device_user_env": "IAG1_DEVICE_USER", + "device_pass_env": "IAG1_DEVICE_PASS", "secret_group": "testGroup2", }, ] @@ -147,8 +156,11 @@ def update_or_create_automation_gateways( enabled: bool, username_env: str, password_env: str, + ansible_vault_env: str, + device_user_env: str, + device_pass_env: str, secret_group: str, -): +): # pylint: disable=too-many-arguments,too-many-locals """Fixture to populate Automation Gateways.""" # Fetch the active status status = Status.objects.get(name="Active") @@ -169,11 +181,26 @@ def update_or_create_automation_gateways( name=password_env, provider="environment-variable", parameters={"variable": password_env} ) + # Create Ansible VAULT secret + ansible_vault, _ = Secret.objects.update_or_create( + name=ansible_vault_env, provider="environment-variable", parameters={"variable:": ansible_vault_env} + ) + + # Create Device user secret + device_user, _ = Secret.objects.update_or_create( + name=device_user_env, provider="environment-variable", parameters={"variable:": device_user_env} + ) + + # Create Device pass secret + device_pass, _ = Secret.objects.update_or_create( + name=device_pass_env, provider="environment-variable", parameters={"variable:": device_pass_env} + ) + # Create a secrets group secret_group, _ = SecretsGroup.objects.update_or_create(name=secret_group) # Associate the REST username with the secrets group - username_assoc, _ = 
SecretsGroupAssociation.objects.update_or_create( + SecretsGroupAssociation.objects.update_or_create( secrets_group=secret_group, secret=secret_username, access_type=SecretsGroupAccessTypeChoices.TYPE_REST, @@ -181,17 +208,41 @@ def update_or_create_automation_gateways( ) # Associate the REST password with the secrets group - password_assoc, _ = SecretsGroupAssociation.objects.update_or_create( + SecretsGroupAssociation.objects.update_or_create( secrets_group=secret_group, secret=secret_password, access_type=SecretsGroupAccessTypeChoices.TYPE_REST, secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD, ) + # Associate the Ansible Vault secret with the secrets group + SecretsGroupAssociation.objects.update_or_create( + secrets_group=secret_group, + secret=ansible_vault, + access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, + secret_type=SecretsGroupSecretTypeChoices.TYPE_KEY, + ) + + # Associate the Device username with the secrets group + SecretsGroupAssociation.objects.update_or_create( + secrets_group=secret_group, + secret=device_user, + access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, + secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME, + ) + + # Associate the Device password with the secrets group + SecretsGroupAssociation.objects.update_or_create( + secrets_group=secret_group, + secret=device_pass, + access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, + secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD, + ) + # Create the external integration gateway, _ = ExternalIntegration.objects.update_or_create(name=name, remote_url=gateway, secrets_group=secret_group) # Create the Automation Gateway object - automation_gateway, _ = AutomationGatewayModel.objects.update_or_create( + AutomationGatewayModel.objects.update_or_create( name=name, description=description, location=location, gateway=gateway, enabled=enabled ) diff --git a/nautobot_ssot/tests/itential/test_clients.py b/nautobot_ssot/tests/itential/test_clients.py index 9e1ad5cb0..14d19e489 100644 --- a/nautobot_ssot/tests/itential/test_clients.py +++ b/nautobot_ssot/tests/itential/test_clients.py @@ -10,59 +10,59 @@ class AutomationGatewayClientTestCase(ItentialSSoTBaseTestCase): def test_login__success(self): """Test API client login.""" response = self.client.login() - self.assertEquals(response, gateways.responses["iag1"]["responses"].get("login")) + self.assertEqual(response, gateways.responses["iag1"]["responses"].get("login")) def test_get_devices__success(self): """Test get_devices.""" response = self.client.get_devices() - self.assertEquals(response, gateways.responses["iag1"]["responses"].get("get_devices")) + self.assertEqual(response, gateways.responses["iag1"]["responses"].get("get_devices")) def test_get_device__success(self): """Test get_device.""" response = self.client.get_device(device_name="rtr1.example.net") - self.assertEquals(response, gateways.responses["iag1"]["responses"].get("get_device")) + self.assertEqual(response, gateways.responses["iag1"]["responses"].get("get_device")) def test_create_device__success(self): """Test create_device.""" response = self.client.create_device(device_name="rtr10.example.net", variables={}) - self.assertEquals(response, gateways.responses["iag1"]["responses"].get("create_device")) + self.assertEqual(response, gateways.responses["iag1"]["responses"].get("create_device")) def test_update_device__success(self): """Test update_device.""" response = self.client.update_device(device_name="rtr10.example.net", variables={}) - self.assertEquals(response, 
gateways.responses["iag1"]["responses"].get("update_device")) + self.assertEqual(response, gateways.responses["iag1"]["responses"].get("update_device")) def test_delete_device__success(self): """Test delete_device.""" response = self.client.delete_device(device_name="rtr10.example.net") - self.assertEquals(response, gateways.responses["iag1"]["responses"].get("delete_device")) + self.assertEqual(response, gateways.responses["iag1"]["responses"].get("delete_device")) def test_get_groups__success(self): """Test get_groups.""" response = self.client.get_groups() - self.assertEquals(response, gateways.responses["iag1"]["responses"].get("get_groups")) + self.assertEqual(response, gateways.responses["iag1"]["responses"].get("get_groups")) def test_get_group__success(self): """Test get_group.""" response = self.client.get_group(group_name="all") - self.assertEquals(response, gateways.responses["iag1"]["responses"].get("get_group")) + self.assertEqual(response, gateways.responses["iag1"]["responses"].get("get_group")) def test_create_group__success(self): """Test create_group.""" response = self.client.create_group(group_name="test-group", variables={}) - self.assertEquals(response, gateways.responses["iag1"]["responses"].get("create_group")) + self.assertEqual(response, gateways.responses["iag1"]["responses"].get("create_group")) def test_update_group__success(self): """Test update_group.""" response = self.client.update_group(group_name="test-group", variables={}) - self.assertEquals(response, gateways.responses["iag1"]["responses"].get("update_group")) + self.assertEqual(response, gateways.responses["iag1"]["responses"].get("update_group")) def test_delete_group__success(self): """Test delete_group.""" response = self.client.delete_group(group_name="test-group") - self.assertEquals(response, gateways.responses["iag1"]["responses"].get("delete_group")) + self.assertEqual(response, gateways.responses["iag1"]["responses"].get("delete_group")) def test_logout__success(self): """Test API client logout.""" response = self.client.logout() - self.assertEquals(response, gateways.responses["iag1"]["responses"].get("logout")) + self.assertEqual(response, gateways.responses["iag1"]["responses"].get("logout")) From 4a2b26ce4d11b61b862b724db516e1782843a231 Mon Sep 17 00:00:00 2001 From: jtdub Date: Thu, 18 Apr 2024 15:04:06 -0500 Subject: [PATCH 202/229] update how diffsync uses location model and add default ansible group sync --- changes/432.added | 5 ++ docs/admin/integrations/index.md | 1 + docs/admin/integrations/itential_setup.md | 25 ++++++++++ .../itential/diffsync/adapters/itential.py | 20 ++++++-- .../itential/diffsync/adapters/nautobot.py | 48 +++++++++++++++---- .../itential/diffsync/models/__init__.py | 15 ------ .../itential/diffsync/models/itential.py | 19 +++++++- .../itential/diffsync/models/nautobot.py | 8 +++- .../itential/diffsync/models/shared.py | 27 +++++++++++ nautobot_ssot/integrations/itential/jobs.py | 3 +- nautobot_ssot/tests/itential/fixtures/base.py | 4 +- .../tests/itential/fixtures/gateways.py | 12 ++--- nautobot_ssot/tests/itential/test_jobs.py | 2 +- 13 files changed, 145 insertions(+), 44 deletions(-) create mode 100644 changes/432.added create mode 100644 docs/admin/integrations/itential_setup.md create mode 100644 nautobot_ssot/integrations/itential/diffsync/models/shared.py diff --git a/changes/432.added b/changes/432.added new file mode 100644 index 000000000..ab8967c16 --- /dev/null +++ b/changes/432.added @@ -0,0 +1,5 @@ +Added an SSoT to sync Nautobot ==> Itential 
Automation Gateway. + +This integration allows users to sync Nautobot device inventory to Itential Automation Gateway(s) (IAG). +The current IAG inventory that is supported is its default Ansible inventory. +Netmiko, Nornir, and HTTP request inventories will be added at a later date. \ No newline at end of file diff --git a/docs/admin/integrations/index.md b/docs/admin/integrations/index.md index 292f11a17..1f28b95e8 100644 --- a/docs/admin/integrations/index.md +++ b/docs/admin/integrations/index.md @@ -7,4 +7,5 @@ This Nautobot app supports the following integrations: - [Device42](./device42_setup.md) - [Infoblox](./infoblox_setup.md) - [IPFabric](./ipfabric_setup.md) +- [Itential](./itential_setup.md) - [ServiceNow](./servicenow_setup.md) diff --git a/docs/admin/integrations/itential_setup.md b/docs/admin/integrations/itential_setup.md new file mode 100644 index 000000000..85826a302 --- /dev/null +++ b/docs/admin/integrations/itential_setup.md @@ -0,0 +1,25 @@ +# Itential Integration Setup + +This guide will walk you through the steps to set up the Itential integration with the `nautobot_ssot` app. + +## Prerequisites + +Before configuring the integration, please ensure that the `nautobot-ssot` app was [installed with the Itential integration extra dependencies](../install.md#install-guide). + +```shell +pip install nautobot-ssot[itential] +``` + +## Configuration + +The Itential integration leverages the [External Integrations](https://docs.nautobot.com/projects/core/en/stable/user-guide/platform-functionality/externalintegration/?h=external) and [Secrets](https://docs.nautobot.com/projects/core/en/stable/user-guide/platform-functionality/secret/?h=secrets) heavily to configure the integration. The only change that is required to be made in `nautobot_config.py` is to enable the integration.
+ +Below is an example snippet from `nautobot_config.py` that demonstrates how to enable the Itential integration: + +```python +PLUGINS_CONFIG = { + "nautobot_ssot": { + "enable_itential": True, + } +} +``` \ No newline at end of file diff --git a/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py b/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py index 4bac1d47e..6227a46d8 100644 --- a/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py +++ b/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py @@ -3,7 +3,10 @@ from diffsync import DiffSync -from nautobot_ssot.integrations.itential.diffsync.models.itential import ItentialAnsibleDeviceModel +from nautobot_ssot.integrations.itential.diffsync.models.itential import ( + ItentialAnsibleDeviceModel, + ItentialDefaultAnsibleGroupModel, +) from nautobot_ssot.integrations.itential.clients import AutomationGatewayClient @@ -11,7 +14,8 @@ class ItentialAnsibleDeviceAdapter(DiffSync): """Itential Ansible Device Diffsync adapter.""" device = ItentialAnsibleDeviceModel - top_level = ["device"] + all_group = ItentialDefaultAnsibleGroupModel + top_level = ["all_group", "device"] def __init__(self, api_client: AutomationGatewayClient, job: object, sync: object, *args, **kwargs): """Initialize Diffsync Adapter.""" @@ -22,11 +26,19 @@ def __init__(self, api_client: AutomationGatewayClient, job: object, sync: objec def load(self): """Load Adapter.""" + self.job.logger.info(f"Loading default ansible group variables from {self.api_client.host}.") + groups = self.api_client.get_groups().get("data") + + for iag_group in groups: + if iag_group.get("name") == "all": + _group = self.all_group(name=iag_group.get("name"), variables=iag_group.get("variables")) + + self.add(_group) + self.job.logger.info(f"Loading Itential devices from {self.api_client.host} into Diffsync adapter.") devices = self.api_client.get_devices().get("data") for iag_device in devices: - device_vars = iag_device.get("variables") - _device = self.device(name=iag_device.get("name"), variables=device_vars) + _device = self.device(name=iag_device.get("name"), variables=iag_device.get("variables")) self.add(_device) diff --git a/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py index 5bae7b2fc..173c32b5e 100644 --- a/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py @@ -6,26 +6,32 @@ from diffsync import DiffSync from nautobot.extras.models import Status -from nautobot.dcim.models import Device, Location +from nautobot.dcim.models import Device -from nautobot_ssot.integrations.itential.diffsync.models.nautobot import NautobotAnsibleDeviceModel +from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices + +from nautobot_ssot.integrations.itential.models import AutomationGatewayModel +from nautobot_ssot.integrations.itential.diffsync.models.nautobot import ( + NautobotAnsibleDeviceModel, + NautobotDefaultAnsibleGroupModel, +) class NautobotAnsibleDeviceAdapter(DiffSync): """Nautobot => Itential Ansible Device Diffsync Adapter.""" device = NautobotAnsibleDeviceModel - top_level = ["device"] + all_group = NautobotDefaultAnsibleGroupModel + top_level = ["all_group", "device"] def __init__( # pylint disable=too-many-arguments - self, job: object, sync: object, location: Location, location_descendants: bool, status: Status, *args, **kwargs + 
self, job: object, sync: object, gateway: AutomationGatewayModel, status: Status, *args, **kwargs ): """Initialize Nautobot Itential Ansible Device Diffsync adapter.""" super().__init__(*args, **kwargs) self.job = job self.sync = sync - self.location = location - self.location_descendants = location_descendants + self.gateway = gateway self.status = status def _is_rfc1123_compliant(self, device_name: str) -> bool: @@ -60,11 +66,33 @@ def _ansible_vars(self, device_obj: Device) -> dict: return {**ansible_host, **ansible_network_os, **config_context} + @property + def _default_group_vars(self) -> dict: + """Create the ansible default group variables to load into Automation Gateway.""" + username = self.gateway.gateway.secrets_group.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, + secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME, + ) + password = self.gateway.gateway.secrets_group.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, + secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD, + ) + + ansible_username = {"ansible_username": username} if username else {} + ansible_password = {"ansible_passwod": password} if password else {} + + return {**ansible_username, **ansible_password} + def load(self): """Load Nautobot Diffsync adapter.""" + + self.job.logger.info("Loading default ansible group variables from Nautobot.") + _group = self.all_group(name="all", variables=self._default_group_vars) + self.add(_group) + self.job.logger.info("Loading locations from Nautobot.") - location = Location.objects.get(name=self.location) - locations = location.descendants(include_self=True) if self.location_descendants else location + location = self.gateway.location + locations = location.descendants(include_self=True) if self.gateway.location_descendants else location self.job.logger.info("Loading devices from Nautobot.") devices = Device.objects.filter(location__in=locations, status=self.status.pk).exclude(primary_ip4=None) @@ -77,9 +105,9 @@ def load(self): self.add(_device) else: - raise Exception( + raise Exception( # pylint: disable=broad-exception-raised f"{nb_device.name} is not RFC 1123 compliant." 
- ) # pylint: disable=broad-exception-raised + ) except Exception as exc: # pylint: disable=broad-exception-caught stacktrace = traceback.format_exc() self.job.logger.warning(f"{nb_device.name} was not added to inventory due to an error.") diff --git a/nautobot_ssot/integrations/itential/diffsync/models/__init__.py b/nautobot_ssot/integrations/itential/diffsync/models/__init__.py index c2bff3a79..29a192ccc 100644 --- a/nautobot_ssot/integrations/itential/diffsync/models/__init__.py +++ b/nautobot_ssot/integrations/itential/diffsync/models/__init__.py @@ -1,16 +1 @@ """Itential SSoT diffsync models.""" - - -from typing import Optional -from diffsync import DiffSyncModel - - -class SharedAnsibleDeviceDiffsyncModel(DiffSyncModel): - """Itential Ansible Device DiffSyncModel.""" - - _modelname = "device" - _identifiers = ("name",) - _attributes = ("variables",) - - name: str - variables: Optional[dict] diff --git a/nautobot_ssot/integrations/itential/diffsync/models/itential.py b/nautobot_ssot/integrations/itential/diffsync/models/itential.py index 300823f48..b3be79810 100644 --- a/nautobot_ssot/integrations/itential/diffsync/models/itential.py +++ b/nautobot_ssot/integrations/itential/diffsync/models/itential.py @@ -1,10 +1,10 @@ """Itential SSoT models.""" -from nautobot_ssot.integrations.itential.diffsync.models import SharedAnsibleDeviceDiffsyncModel +from nautobot_ssot.integrations.itential.diffsync.models import shared -class ItentialAnsibleDeviceModel(SharedAnsibleDeviceDiffsyncModel): +class ItentialAnsibleDeviceModel(shared.SharedAnsibleDeviceDiffsyncModel): """Itential Ansible Device DiffSyncModel.""" @classmethod @@ -24,3 +24,18 @@ def update(self, attrs): """Update device in Automation Gateway.""" self.diffsync.api_client.update_device(device_name=self.name, variables=attrs.get("variables")) return super().update(attrs) + + +class ItentialDefaultAnsibleGroupModel(shared.SharedAnsibleDefaultGroupDiffsyncModel): + """Itential Default Ansible Group DiffsyncModel.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create default group in Automation Gateway.""" + diffsync.api_client.create_group(group_name=ids.get("name"), variables=attrs.get("variables")) + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + + def update(self, attrs): + """Update default group in Automation Gateway.""" + self.diffsync.api_client.update_device(device_name=self.name, variables=attrs.get("variables")) + return super().update(attrs) diff --git a/nautobot_ssot/integrations/itential/diffsync/models/nautobot.py b/nautobot_ssot/integrations/itential/diffsync/models/nautobot.py index e2119f259..0ff87b8b1 100644 --- a/nautobot_ssot/integrations/itential/diffsync/models/nautobot.py +++ b/nautobot_ssot/integrations/itential/diffsync/models/nautobot.py @@ -1,8 +1,12 @@ """Itential SSoT Nautobot models.""" -from nautobot_ssot.integrations.itential.diffsync.models import SharedAnsibleDeviceDiffsyncModel +from nautobot_ssot.integrations.itential.diffsync.models import shared -class NautobotAnsibleDeviceModel(SharedAnsibleDeviceDiffsyncModel): +class NautobotAnsibleDeviceModel(shared.SharedAnsibleDeviceDiffsyncModel): """Nautobot => Itential Ansible Device DiffSyncModel.""" + + +class NautobotDefaultAnsibleGroupModel(shared.SharedAnsibleDefaultGroupDiffsyncModel): + """Nautobot => Itential Default Ansible Group DiffsyncModel.""" diff --git a/nautobot_ssot/integrations/itential/diffsync/models/shared.py b/nautobot_ssot/integrations/itential/diffsync/models/shared.py new file mode 100644 index 
000000000..611d60c96 --- /dev/null +++ b/nautobot_ssot/integrations/itential/diffsync/models/shared.py @@ -0,0 +1,27 @@ +"""Itential SSoT shared diffsync models.""" + + +from typing import Optional +from diffsync import DiffSyncModel + + +class SharedAnsibleDeviceDiffsyncModel(DiffSyncModel): + """Itential Ansible Device DiffSyncModel.""" + + _modelname = "device" + _identifiers = ("name",) + _attributes = ("variables",) + + name: str + variables: Optional[dict] + + +class SharedAnsibleDefaultGroupDiffsyncModel(DiffSyncModel): + """Itential Default Ansible Group DiffsyncModel.""" + + _modelname = "all_group" + _identifiers = ("name",) + _attributes = ("variables",) + + name: str + variables: Optional[dict] diff --git a/nautobot_ssot/integrations/itential/jobs.py b/nautobot_ssot/integrations/itential/jobs.py index abc95d068..a9f66806f 100644 --- a/nautobot_ssot/integrations/itential/jobs.py +++ b/nautobot_ssot/integrations/itential/jobs.py @@ -39,8 +39,7 @@ def load_source_adapter(self): self.source_adapter = NautobotAnsibleDeviceAdapter( job=self, sync=self.sync, - location=self.location, - location_descendants=self.location_descendants, + gateway=self.gateway, status=self.status, ) self.logger.info("Loading data from Nautobot.") diff --git a/nautobot_ssot/tests/itential/fixtures/base.py b/nautobot_ssot/tests/itential/fixtures/base.py index 8ff0f0b53..058dcb007 100644 --- a/nautobot_ssot/tests/itential/fixtures/base.py +++ b/nautobot_ssot/tests/itential/fixtures/base.py @@ -77,7 +77,7 @@ def setUp(self): self.client = clients.api_client(self.gateway) self.itential_adapter = itential.ItentialAnsibleDeviceAdapter(api_client=self.client, job=self.job, sync=None) self.nautobot_adapter = nautobot.NautobotAnsibleDeviceAdapter( - job=self.job, location="North America", location_descendants=True, status=self.status, sync=None + job=self.job, gateway=self.gateway, status=self.status, sync=None ) self.itential_adapter.load() @@ -149,7 +149,7 @@ def setUp(self): self.client = clients.api_client(self.gateway) self.itential_adapter = itential.ItentialAnsibleDeviceAdapter(api_client=self.client, job=self.job, sync=None) self.nautobot_adapter = nautobot.NautobotAnsibleDeviceAdapter( - job=self.job, location="North America", location_descendants=True, status=self.status, sync=None + job=self.job, gateway=self.gateway, status=self.status, sync=None ) self.itential_adapter.load() diff --git a/nautobot_ssot/tests/itential/fixtures/gateways.py b/nautobot_ssot/tests/itential/fixtures/gateways.py index 9182cfc9f..483805447 100644 --- a/nautobot_ssot/tests/itential/fixtures/gateways.py +++ b/nautobot_ssot/tests/itential/fixtures/gateways.py @@ -41,9 +41,9 @@ "enabled": True, "username_env": "IAG2_USERNAME", "password_env": "IAG2_PASSWORD", - "ansible_vault_env": "IAG1_VAULT", - "device_user_env": "IAG1_DEVICE_USER", - "device_pass_env": "IAG1_DEVICE_PASS", + "ansible_vault_env": "IAG2_VAULT", + "device_user_env": "IAG2_DEVICE_USER", + "device_pass_env": "IAG2_DEVICE_PASS", "secret_group": "testGroup2", }, ] @@ -183,17 +183,17 @@ def update_or_create_automation_gateways( # Create Ansible VAULT secret ansible_vault, _ = Secret.objects.update_or_create( - name=ansible_vault_env, provider="environment-variable", parameters={"variable:": ansible_vault_env} + name=ansible_vault_env, provider="environment-variable", parameters={"variable": ansible_vault_env} ) # Create Device user secret device_user, _ = Secret.objects.update_or_create( - name=device_user_env, provider="environment-variable", parameters={"variable:": 
device_user_env} + name=device_user_env, provider="environment-variable", parameters={"variable": device_user_env} ) # Create Device pass secret device_pass, _ = Secret.objects.update_or_create( - name=device_pass_env, provider="environment-variable", parameters={"variable:": device_pass_env} + name=device_pass_env, provider="environment-variable", parameters={"variable": device_pass_env} ) # Create a secrets group diff --git a/nautobot_ssot/tests/itential/test_jobs.py b/nautobot_ssot/tests/itential/test_jobs.py index a1cab9d5e..913377cf4 100644 --- a/nautobot_ssot/tests/itential/test_jobs.py +++ b/nautobot_ssot/tests/itential/test_jobs.py @@ -25,7 +25,7 @@ def test_job__success(self): log_entries = JobLogEntry.objects.filter(job_result=job_result) self.assertGreater(log_entries.count(), 1) log_entries = [log_entry.message for log_entry in log_entries] - summary_output = "{'create': 1, 'update': 1, 'delete': 1, 'no-change': 1, 'skip': 0}" + summary_output = "{'create': 2, 'update': 1, 'delete': 1, 'no-change': 1, 'skip': 0}" self.assertIn(summary_output, log_entries) self.assertIn("Sync complete", log_entries) From 715768a6105c95e0ac5dd25c5e03a09bbb52f8d9 Mon Sep 17 00:00:00 2001 From: jtdub Date: Thu, 18 Apr 2024 16:15:16 -0500 Subject: [PATCH 203/229] add documentation --- docs/admin/integrations/itential_setup.md | 42 ++++++++++++++- .../itential/diffsync/adapters/nautobot.py | 52 ++++++++++++++----- 2 files changed, 79 insertions(+), 15 deletions(-) diff --git a/docs/admin/integrations/itential_setup.md b/docs/admin/integrations/itential_setup.md index 85826a302..286e3a124 100644 --- a/docs/admin/integrations/itential_setup.md +++ b/docs/admin/integrations/itential_setup.md @@ -12,7 +12,7 @@ pip install nautobot-ssot[itential] ## Configuration -The Itential integration leverages the [External Integrations](https://docs.nautobot.com/projects/core/en/stable/user-guide/platform-functionality/externalintegration/?h=external) and [Secrets](https://docs.nautobot.com/projects/core/en/stable/user-guide/platform-functionality/secret/?h=secrets) heavily to configure the integration. The only change that is required to be made in `nautobot_config.py` is to enable the integration. +The integration with Itential primarily utilizes the [External Integrations](https://docs.nautobot.com/projects/core/en/stable/user-guide/platform-functionality/externalintegration/?h=external) and [Secrets](https://docs.nautobot.com/projects/core/en/stable/user-guide/platform-functionality/secret/?h=secrets) features within Nautobot to set up the integration. To enable this integration, the only modification needed is to activate it in the nautobot_config.py file. Below is an example snippet from `nautobot_config.py` that demonstrates how to enable the Itential integration: @@ -22,4 +22,42 @@ PLUGINS_CONFIG = { "enable_itential": True, } } -``` \ No newline at end of file +``` + +Remaining configurations are performed in the Nautobot UI or through the Nautobot API. + +### Secrets + +The Itential integration necessitates four secret values: (1) Itential API access username, (2) Itential API access password, (3) network device access username, and (4) network device access password. You can store these secrets using the secrets provider of your choice. + +### Secrets Group + +When assigning secrets to a secrets group, please refer to the table below to correctly assign each secret to its respective access type and secret type. 
+ +| Secret Description | Access Type | Secret Type | +|-----------------------|-------------|-------------| +| Itential API username | REST | Username | +| Itential API password | REST | Password | +| Device username | GENERIC | Username | +| Device password | GENERIC | Password | + +### External Integration + +When setting up an external integration, you must provide the following required fields: + +1. **Name**: The unique identifier for the integration. +2. **Remote URL**: The endpoint URL, including the protocol and port, if applicable. +3. **Verify SSL**: A boolean value indicating whether SSL certificates should be verified. +4. **Secrets Group**: The group of secrets associated with the integration, containing necessary authentication details. + +The remote URL must include both the protocol (either http or https) and the TCP port used by the automation gateway. For example, to access the automation gateway, you would enter a URL like: https://iag.example.com:8443. + +### Automation Gateway Management + +To manage the Automation Gateway, navigate to Plugins -> Single Source of Truth -> Itential Automation Gateway in your application. From this interface, you can input details about the automation gateway, which include: + +1. **Name**: Specify the name of the automation gateway. +2. **Description**: Provide a brief description of what the automation gateway is used for. +3. **Location**: Indicate the primary location of the devices managed by the automation gateway. +4. **Location Descendants**: This boolean value determines whether the automation gateway should also manage devices in child locations of the specified primary location. +5. **Enabled**: This boolean setting allows you to enable or disable inventory synchronization with the automation gateway.
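+
+The same objects can also be created programmatically. The snippet below is a minimal sketch using the Nautobot ORM, modeled on this integration's test fixtures; the group name, gateway name, URL, location, and environment variable names are illustrative placeholders, and passing `location_descendants` at creation time is an assumption based on the model fields the sync job reads.
+
+```python
+from nautobot.dcim.models import Location
+from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices
+from nautobot.extras.models import ExternalIntegration, Secret, SecretsGroup, SecretsGroupAssociation
+
+from nautobot_ssot.integrations.itential.models import AutomationGatewayModel
+
+# REST secrets hold the Itential API credentials; GENERIC secrets hold device credentials.
+secrets_group, _ = SecretsGroup.objects.update_or_create(name="iag-example-group")
+for env_var, access_type, secret_type in [
+    ("IAG_USERNAME", SecretsGroupAccessTypeChoices.TYPE_REST, SecretsGroupSecretTypeChoices.TYPE_USERNAME),
+    ("IAG_PASSWORD", SecretsGroupAccessTypeChoices.TYPE_REST, SecretsGroupSecretTypeChoices.TYPE_PASSWORD),
+    ("DEVICE_USERNAME", SecretsGroupAccessTypeChoices.TYPE_GENERIC, SecretsGroupSecretTypeChoices.TYPE_USERNAME),
+    ("DEVICE_PASSWORD", SecretsGroupAccessTypeChoices.TYPE_GENERIC, SecretsGroupSecretTypeChoices.TYPE_PASSWORD),
+]:
+    # Each secret reads its value from an environment variable.
+    secret, _ = Secret.objects.update_or_create(
+        name=env_var, provider="environment-variable", parameters={"variable": env_var}
+    )
+    SecretsGroupAssociation.objects.update_or_create(
+        secrets_group=secrets_group, secret=secret, access_type=access_type, secret_type=secret_type
+    )
+
+# The external integration records the gateway URL, including protocol and TCP port.
+gateway, _ = ExternalIntegration.objects.update_or_create(
+    name="IAG Example", remote_url="https://iag.example.com:8443", verify_ssl=True, secrets_group=secrets_group
+)
+
+# The Automation Gateway record scopes the sync to a location and enables it.
+AutomationGatewayModel.objects.update_or_create(
+    name="IAG Example",
+    description="Example automation gateway",
+    location=Location.objects.get(name="North America"),
+    location_descendants=True,
+    gateway=gateway,
+    enabled=True,
+)
+```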
diff --git a/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py index 173c32b5e..39e7fd794 100644 --- a/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/itential/diffsync/adapters/nautobot.py @@ -56,36 +56,62 @@ def _is_rfc1123_compliant(self, device_name: str) -> bool: def _ansible_vars(self, device_obj: Device) -> dict: """Create device variables to load into Automation Gateway.""" + # Add ansible_network_os if available if device_obj.platform and device_obj.platform.network_driver_mappings.get("ansible"): ansible_network_os = {"ansible_network_os": device_obj.platform.network_driver_mappings.get("ansible")} else: ansible_network_os = {} + # Add device specific credentials if available + try: + ansible_username = { + "ansible_username": device_obj.secrets_group.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, + secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME, + ) + } + ansible_password = { + "ansible_password": device_obj.secrets_group.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, + secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD, + ) + } + except AttributeError: + ansible_username = {} + ansible_password = {} + + # Add ansible_host ansible_host = {"ansible_host": device_obj.primary_ip4.host} + + # Add device attributes from config_context config_context = device_obj.get_config_context() - return {**ansible_host, **ansible_network_os, **config_context} + return {**ansible_host, **ansible_network_os, **ansible_username, **ansible_password, **config_context} @property def _default_group_vars(self) -> dict: """Create the ansible default group variables to load into Automation Gateway.""" - username = self.gateway.gateway.secrets_group.get_secret_value( - access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, - secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME, - ) - password = self.gateway.gateway.secrets_group.get_secret_value( - access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, - secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD, - ) - - ansible_username = {"ansible_username": username} if username else {} - ansible_password = {"ansible_passwod": password} if password else {} + try: + ansible_username = { + "ansible_username": self.gateway.gateway.secrets_group.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, + secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME, + ) + } + ansible_password = { + "ansible_password": self.gateway.gateway.secrets_group.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, + secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD, + ) + } + except AttributeError: + ansible_username = {} + ansible_password = {} return {**ansible_username, **ansible_password} def load(self): """Load Nautobot Diffsync adapter.""" - self.job.logger.info("Loading default ansible group variables from Nautobot.") _group = self.all_group(name="all", variables=self._default_group_vars) self.add(_group) From 673741df688280436f1cd035b00df785f5ad81be Mon Sep 17 00:00:00 2001 From: jtdub Date: Thu, 18 Apr 2024 16:17:03 -0500 Subject: [PATCH 204/229] update readme --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index b287c9fae..00e420853 100644 --- a/README.md +++ b/README.md @@ -31,6 +31,7 @@ This Nautobot application framework includes the following integrations: - Device42 - 
Infoblox - IPFabric +- Itential - ServiceNow Read more about integrations [here](https://docs.nautobot.com/projects/ssot/en/latest/user/integrations). To enable and configure integrations follow the instructions from [the install guide](https://docs.nautobot.com/projects/ssot/en/latest/admin/install/#integrations-configuration). @@ -79,6 +80,7 @@ The SSoT framework includes a number of integrations with external Systems of Re * Arista CloudVision * Device42 * Infoblox +* Itential * ServiceNow > Note that the Arista CloudVision integration is currently incompatible with the [Arista Labs](https://labs.arista.com/) environment due to a TLS issue. It has been confirmed to work in on-prem environments previously. From 8e2dc5f6bda1be1ab8b1e4c8b29f0fd8415120a3 Mon Sep 17 00:00:00 2001 From: jtdub Date: Thu, 18 Apr 2024 17:03:02 -0500 Subject: [PATCH 205/229] remove the sync_data override --- nautobot_ssot/integrations/itential/jobs.py | 79 ++------------------- 1 file changed, 4 insertions(+), 75 deletions(-) diff --git a/nautobot_ssot/integrations/itential/jobs.py b/nautobot_ssot/integrations/itential/jobs.py index a9f66806f..5b9199e31 100644 --- a/nautobot_ssot/integrations/itential/jobs.py +++ b/nautobot_ssot/integrations/itential/jobs.py @@ -1,9 +1,5 @@ """Itential SSoT Jobs.""" -import tracemalloc - -from datetime import datetime - from nautobot.extras.models import Status from nautobot.extras.jobs import ObjectVar @@ -45,45 +41,12 @@ def load_source_adapter(self): self.logger.info("Loading data from Nautobot.") self.source_adapter.load() - def load_target_adapter(self, api_client: AutomationGatewayClient): # pylint: disable=arguments-differ + def load_target_adapter(self): """Load Itential adapter.""" - self.target_adapter = ItentialAnsibleDeviceAdapter(job=self, sync=self.sync, api_client=api_client) - self.logger.info("Loading data from Itential.") - self.target_adapter.load() - - def sync_data(self, memory_profiling): - """Execute Nautobot ⟹ Itential Automation Gateway sync.""" - - def record_memory_trace(step: str): - """Helper function to record memory usage and reset tracemalloc stats.""" - memory_final, memory_peak = tracemalloc.get_traced_memory() - setattr(self.sync, f"{step}_memory_final", memory_final) - setattr(self.sync, f"{step}_memory_peak", memory_peak) - self.sync.save() - self.logger.info("Traced memory for %s (Final, Peak): %s bytes, %s bytes", step, memory_final, memory_peak) - tracemalloc.clear_traces() - - if not self.sync: - return - if not self.gateway.enabled: self.logger.warning(f"{self.gateway.gateway.remote_url} is not enabled to sync inventory.") return - if memory_profiling: - tracemalloc.start() - - start_time = datetime.now() - - self.load_source_adapter() - load_source_adapter_time = datetime.now() - self.sync.source_load_time = load_source_adapter_time - start_time - self.sync.save() - self.logger.info("Source Load Time from %s: %s", self.source_adapter, self.sync.source_load_time) - - if memory_profiling: - record_memory_trace("source_load") - api_client = AutomationGatewayClient( host=self.gateway.gateway.remote_url, username=self.gateway.gateway.secrets_group.get_secret_value( @@ -97,50 +60,16 @@ def record_memory_trace(step: str): job=self, verify_ssl=self.gateway.gateway.verify_ssl, ) - api_client.login() - self.load_target_adapter(api_client=api_client) - load_target_adapter_time = datetime.now() - self.sync.target_load_time = load_target_adapter_time - load_source_adapter_time - self.sync.save() - self.logger.info("Target Load Time from %s: %s", 
From 8e2dc5f6bda1be1ab8b1e4c8b29f0fd8415120a3 Mon Sep 17 00:00:00 2001
From: jtdub
Date: Thu, 18 Apr 2024 17:03:02 -0500
Subject: [PATCH 205/229] remove the sync_data override

---
 nautobot_ssot/integrations/itential/jobs.py | 79 ++-------------------
 1 file changed, 4 insertions(+), 75 deletions(-)

diff --git a/nautobot_ssot/integrations/itential/jobs.py b/nautobot_ssot/integrations/itential/jobs.py
index a9f66806f..5b9199e31 100644
--- a/nautobot_ssot/integrations/itential/jobs.py
+++ b/nautobot_ssot/integrations/itential/jobs.py
@@ -1,9 +1,5 @@
 """Itential SSoT Jobs."""
 
-import tracemalloc
-
-from datetime import datetime
-
 from nautobot.extras.models import Status
 from nautobot.extras.jobs import ObjectVar
 
@@ -45,45 +41,12 @@ def load_source_adapter(self):
         self.logger.info("Loading data from Nautobot.")
         self.source_adapter.load()
 
-    def load_target_adapter(self, api_client: AutomationGatewayClient):  # pylint: disable=arguments-differ
+    def load_target_adapter(self):
         """Load Itential adapter."""
-        self.target_adapter = ItentialAnsibleDeviceAdapter(job=self, sync=self.sync, api_client=api_client)
-        self.logger.info("Loading data from Itential.")
-        self.target_adapter.load()
-
-    def sync_data(self, memory_profiling):
-        """Execute Nautobot ⟹ Itential Automation Gateway sync."""
-
-        def record_memory_trace(step: str):
-            """Helper function to record memory usage and reset tracemalloc stats."""
-            memory_final, memory_peak = tracemalloc.get_traced_memory()
-            setattr(self.sync, f"{step}_memory_final", memory_final)
-            setattr(self.sync, f"{step}_memory_peak", memory_peak)
-            self.sync.save()
-            self.logger.info("Traced memory for %s (Final, Peak): %s bytes, %s bytes", step, memory_final, memory_peak)
-            tracemalloc.clear_traces()
-
-        if not self.sync:
-            return
-
         if not self.gateway.enabled:
             self.logger.warning(f"{self.gateway.gateway.remote_url} is not enabled to sync inventory.")
             return
-        if memory_profiling:
-            tracemalloc.start()
-
-        start_time = datetime.now()
-
-        self.load_source_adapter()
-        load_source_adapter_time = datetime.now()
-        self.sync.source_load_time = load_source_adapter_time - start_time
-        self.sync.save()
-        self.logger.info("Source Load Time from %s: %s", self.source_adapter, self.sync.source_load_time)
-
-        if memory_profiling:
-            record_memory_trace("source_load")
-
         api_client = AutomationGatewayClient(
             host=self.gateway.gateway.remote_url,
             username=self.gateway.gateway.secrets_group.get_secret_value(
                 access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC,
                 secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME,
             ),
             password=self.gateway.gateway.secrets_group.get_secret_value(
                 access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC,
                 secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD,
             ),
             job=self,
             verify_ssl=self.gateway.gateway.verify_ssl,
         )
         api_client.login()
-        self.load_target_adapter(api_client=api_client)
-        load_target_adapter_time = datetime.now()
-        self.sync.target_load_time = load_target_adapter_time - load_source_adapter_time
-        self.sync.save()
-        self.logger.info("Target Load Time from %s: %s", self.target_adapter, self.sync.target_load_time)
-
-        if memory_profiling:
-            record_memory_trace("target_load")
-
-        self.logger.info("Calculating diffs...")
-        self.calculate_diff()
-        calculate_diff_time = datetime.now()
-        self.sync.diff_time = calculate_diff_time - load_target_adapter_time
-        self.sync.save()
-        self.logger.info("Diff Calculation Time: %s", self.sync.diff_time)
-
-        if memory_profiling:
-            record_memory_trace("diff")
-
-        if self.dryrun:
-            self.logger.info("As `dryrun` is set, skipping the actual data sync.")
-        else:
-            self.logger.info("Syncing from %s to %s...", self.source_adapter, self.target_adapter)
-            self.execute_sync()
-            execute_sync_time = datetime.now()
-            self.sync.sync_time = execute_sync_time - calculate_diff_time
-            self.sync.save()
-            self.logger.info("Sync complete")
-            self.logger.info("Sync Time: %s", self.sync.sync_time)
-
-        if memory_profiling:
-            record_memory_trace("sync")
-
-        api_client.logout()
+        self.target_adapter = ItentialAnsibleDeviceAdapter(job=self, sync=self.sync, api_client=api_client)
+        self.logger.info("Loading data from Itential.")
+        self.target_adapter.load()
 
     def run(self, dryrun, memory_profiling, gateway, status, *args, **kwargs):  # pylint: disable=arguments-differ
         """Execute sync."""
         self.gateway = gateway
         self.status = status
-        self.location = self.gateway.location  # pylint: disable=attribute-defined-outside-init
-        self.location_descendants = self.gateway.location_descendants  # pylint: disable=attribute-defined-outside-init
         self.dryrun = dryrun
         self.memory_profiling = memory_profiling
         super().run(dryrun=self.dryrun, memory_profiling=self.memory_profiling, *args, **kwargs)
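With the override gone, the job relies on the sync_data() it inherits from the SSoT framework's base job class, which already wraps the same sequence in the timing and optional tracemalloc profiling that was re-implemented here. Roughly, and only as an illustrative sketch of the inherited flow (method names match the diff; the base-class internals are summarized, not quoted):

class InheritedFlowSketch:
    """Illustrative order of operations the inherited sync_data() provides."""

    def sync_data(self, memory_profiling):
        self.load_source_adapter()  # Nautobot -> DiffSync models
        self.load_target_adapter()  # builds the AutomationGatewayClient, logs in, loads models
        self.calculate_diff()       # compare source and target adapters
        if not self.dryrun:
            self.execute_sync()     # push the diff to Automation Gateway

One behavioural difference worth flagging: the old override called api_client.logout() after syncing, and no equivalent call appears in the new load_target_adapter(), so the gateway session is presumably left to expire on its own.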
From f9ced70d073a9c015c14c38d45134a2b12623cd6 Mon Sep 17 00:00:00 2001
From: jtdub
Date: Tue, 23 Apr 2024 14:02:24 -0500
Subject: [PATCH 206/229] updates per review

---
 .../diffsync/models/{shared.py => base.py}    |  4 +--
 .../itential/diffsync/models/itential.py      | 10 ++++--
 .../itential/diffsync/models/nautobot.py      | 32 +++++++++++++++++--
 nautobot_ssot/tests/itential/test_clients.py  | 24 +++++++-------
 nautobot_ssot/tests/itential/test_diffsync.py |  4 +--
 nautobot_ssot/tests/itential/test_jobs.py     |  4 +--
 pyproject.toml                                |  1 +
 7 files changed, 55 insertions(+), 24 deletions(-)
 rename nautobot_ssot/integrations/itential/diffsync/models/{shared.py => base.py} (80%)

diff --git a/nautobot_ssot/integrations/itential/diffsync/models/shared.py b/nautobot_ssot/integrations/itential/diffsync/models/base.py
similarity index 80%
rename from nautobot_ssot/integrations/itential/diffsync/models/shared.py
rename to nautobot_ssot/integrations/itential/diffsync/models/base.py
index 611d60c96..23c60c3a3 100644
--- a/nautobot_ssot/integrations/itential/diffsync/models/shared.py
+++ b/nautobot_ssot/integrations/itential/diffsync/models/base.py
@@ -5,7 +5,7 @@
 from diffsync import DiffSyncModel
 
 
-class SharedAnsibleDeviceDiffsyncModel(DiffSyncModel):
+class BaseAnsibleDeviceDiffsyncModel(DiffSyncModel):
     """Itential Ansible Device DiffSyncModel."""
 
     _modelname = "device"
@@ -16,7 +16,7 @@
     variables: Optional[dict]
 
 
-class SharedAnsibleDefaultGroupDiffsyncModel(DiffSyncModel):
+class BaseAnsibleDefaultGroupDiffsyncModel(DiffSyncModel):
     """Itential Default Ansible Group DiffsyncModel."""
 
     _modelname = "all_group"

diff --git a/nautobot_ssot/integrations/itential/diffsync/models/itential.py b/nautobot_ssot/integrations/itential/diffsync/models/itential.py
index b3be79810..6eec6b691 100644
--- a/nautobot_ssot/integrations/itential/diffsync/models/itential.py
+++ b/nautobot_ssot/integrations/itential/diffsync/models/itential.py
@@ -1,10 +1,10 @@
 """Itential SSoT models."""
 
 
-from nautobot_ssot.integrations.itential.diffsync.models import shared
+from nautobot_ssot.integrations.itential.diffsync.models import base
 
 
-class ItentialAnsibleDeviceModel(shared.SharedAnsibleDeviceDiffsyncModel):
+class ItentialAnsibleDeviceModel(base.BaseAnsibleDeviceDiffsyncModel):
     """Itential Ansible Device DiffSyncModel."""
 
     @classmethod
@@ -26,7 +26,7 @@ def update(self, attrs):
         return super().update(attrs)
 
 
-class ItentialDefaultAnsibleGroupModel(shared.SharedAnsibleDefaultGroupDiffsyncModel):
+class ItentialDefaultAnsibleGroupModel(base.BaseAnsibleDefaultGroupDiffsyncModel):
     """Itential Default Ansible Group DiffsyncModel."""
 
     @classmethod
@@ -39,3 +39,7 @@ def update(self, attrs):
         """Update default group in Automation Gateway."""
         self.diffsync.api_client.update_device(device_name=self.name, variables=attrs.get("variables"))
         return super().update(attrs)
+
+    def delete(self):
+        """Delete default group in Automation Gateway."""
+        raise NotImplementedError

diff --git a/nautobot_ssot/integrations/itential/diffsync/models/nautobot.py b/nautobot_ssot/integrations/itential/diffsync/models/nautobot.py
index 0ff87b8b1..b924e288d 100644
--- a/nautobot_ssot/integrations/itential/diffsync/models/nautobot.py
+++ b/nautobot_ssot/integrations/itential/diffsync/models/nautobot.py
@@ -1,12 +1,38 @@
 """Itential SSoT Nautobot models."""
 
 
-from nautobot_ssot.integrations.itential.diffsync.models import shared
+from nautobot_ssot.integrations.itential.diffsync.models import base
 
 
-class NautobotAnsibleDeviceModel(shared.SharedAnsibleDeviceDiffsyncModel):
+class NautobotAnsibleDeviceModel(base.BaseAnsibleDeviceDiffsyncModel):
     """Nautobot => Itential Ansible Device DiffSyncModel."""
 
+    @classmethod
+    def create(cls, diffsync, ids, attrs):
+        """Create device in Nautobot."""
+        raise NotImplementedError
 
-class NautobotDefaultAnsibleGroupModel(shared.SharedAnsibleDefaultGroupDiffsyncModel):
+    def update(self, attrs):
+        """Update device in Nautobot."""
+        raise NotImplementedError
+
+    def delete(self):
+        """Delete device in Nautobot."""
+        raise NotImplementedError
+
+
+class NautobotDefaultAnsibleGroupModel(base.BaseAnsibleDefaultGroupDiffsyncModel):
     """Nautobot => Itential Default Ansible Group DiffsyncModel."""
+
+    @classmethod
+    def create(cls, diffsync, ids, attrs):
+        """Create default group in Nautobot."""
+        raise NotImplementedError
+
+    def update(self, attrs):
+        """Update default group in Nautobot."""
+        raise NotImplementedError
+
+    def delete(self):
+        """Delete default group in Nautobot."""
+        raise NotImplementedError
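Filling in create/update/delete with NotImplementedError on the Nautobot-side models makes the sync direction explicit: Nautobot is the system of record here, so a diff should never try to write back to it, and an accidental reverse sync now fails loudly. A generic sketch of the same pattern with a bare DiffSyncModel (the model and field names below are illustrative, not the project's classes):

from typing import Optional

from diffsync import DiffSyncModel


class ReadOnlyDevice(DiffSyncModel):
    """Source-of-truth model: participates in diffs but rejects all writes."""

    _modelname = "device"
    _identifiers = ("name",)
    _attributes = ("variables",)

    name: str
    variables: Optional[dict] = None

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Refuse creation; this side of the sync is read-only."""
        raise NotImplementedError

    def update(self, attrs):
        """Refuse updates; this side of the sync is read-only."""
        raise NotImplementedError

    def delete(self):
        """Refuse deletion; this side of the sync is read-only."""
        raise NotImplementedError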
diff --git a/nautobot_ssot/tests/itential/test_clients.py b/nautobot_ssot/tests/itential/test_clients.py
index 14d19e489..e496b7036 100644
--- a/nautobot_ssot/tests/itential/test_clients.py
+++ b/nautobot_ssot/tests/itential/test_clients.py
@@ -7,62 +7,62 @@ class AutomationGatewayClientTestCase(ItentialSSoTBaseTestCase):
     """Itential Automation Gateway Client Test Cases."""
 
-    def test_login__success(self):
+    def test_login_success(self):
         """Test API client login."""
         response = self.client.login()
         self.assertEqual(response, gateways.responses["iag1"]["responses"].get("login"))
 
-    def test_get_devices__success(self):
+    def test_get_devices_success(self):
         """Test get_devices."""
         response = self.client.get_devices()
         self.assertEqual(response, gateways.responses["iag1"]["responses"].get("get_devices"))
 
-    def test_get_device__success(self):
+    def test_get_device_success(self):
         """Test get_device."""
         response = self.client.get_device(device_name="rtr1.example.net")
         self.assertEqual(response, gateways.responses["iag1"]["responses"].get("get_device"))
 
-    def test_create_device__success(self):
+    def test_create_device_success(self):
         """Test create_device."""
         response = self.client.create_device(device_name="rtr10.example.net", variables={})
         self.assertEqual(response, gateways.responses["iag1"]["responses"].get("create_device"))
 
-    def test_update_device__success(self):
+    def test_update_device_success(self):
         """Test update_device."""
         response = self.client.update_device(device_name="rtr10.example.net", variables={})
         self.assertEqual(response, gateways.responses["iag1"]["responses"].get("update_device"))
 
-    def test_delete_device__success(self):
+    def test_delete_device_success(self):
         """Test delete_device."""
         response = self.client.delete_device(device_name="rtr10.example.net")
         self.assertEqual(response, gateways.responses["iag1"]["responses"].get("delete_device"))
 
-    def test_get_groups__success(self):
+    def test_get_groups_success(self):
         """Test get_groups."""
         response = self.client.get_groups()
         self.assertEqual(response, gateways.responses["iag1"]["responses"].get("get_groups"))
 
-    def test_get_group__success(self):
+    def test_get_group_success(self):
         """Test get_group."""
         response = self.client.get_group(group_name="all")
         self.assertEqual(response, gateways.responses["iag1"]["responses"].get("get_group"))
 
-    def test_create_group__success(self):
+    def test_create_group_success(self):
         """Test create_group."""
         response = self.client.create_group(group_name="test-group", variables={})
         self.assertEqual(response, gateways.responses["iag1"]["responses"].get("create_group"))
 
-    def test_update_group__success(self):
+    def test_update_group_success(self):
         """Test update_group."""
         response = self.client.update_group(group_name="test-group", variables={})
         self.assertEqual(response, gateways.responses["iag1"]["responses"].get("update_group"))
 
-    def test_delete_group__success(self):
+    def test_delete_group_success(self):
         """Test delete_group."""
         response = self.client.delete_group(group_name="test-group")
         self.assertEqual(response, gateways.responses["iag1"]["responses"].get("delete_group"))
 
-    def test_logout__success(self):
+    def test_logout_success(self):
         """Test API client logout."""
         response = self.client.logout()
         self.assertEqual(response, gateways.responses["iag1"]["responses"].get("logout"))

diff --git a/nautobot_ssot/tests/itential/test_diffsync.py b/nautobot_ssot/tests/itential/test_diffsync.py
index 4c16a867f..b80ad33c7 100644
--- a/nautobot_ssot/tests/itential/test_diffsync.py
+++ b/nautobot_ssot/tests/itential/test_diffsync.py
@@ -6,12 +6,12 @@ class DiffSyncTestCases(base.ItentialSSoTBaseTestCase):
     """DiffSync test cases."""
 
-    def test_diff__success(self):
+    def test_diff_success(self):
         """Test diff exists."""
         diff = self.nautobot_adapter.diff_to(self.itential_adapter)
         self.assertTrue(diff.has_diffs())
 
-    def test_sync__success(self):
+    def test_sync_success(self):
         """Test successful sync."""
         self.nautobot_adapter.sync_to(self.itential_adapter)
         diff = self.nautobot_adapter.diff_to(self.itential_adapter)

diff --git a/nautobot_ssot/tests/itential/test_jobs.py b/nautobot_ssot/tests/itential/test_jobs.py
index 913377cf4..6d940fbeb 100644
--- a/nautobot_ssot/tests/itential/test_jobs.py
+++ b/nautobot_ssot/tests/itential/test_jobs.py
@@ -13,7 +13,7 @@ class ItentialSSoTJobsTestCase(base.ItentialSSoTBaseTransactionTestCase):
 
     databases = ("default", "job_logs")
 
-    def test_job__success(self):
+    def test_job_success(self):
         """Test successful job."""
         self.job = Job.objects.get(
             job_class_name="ItentialAutomationGatewayDataTarget",
@@ -29,7 +29,7 @@ def test_job__success(self):
         self.assertIn(summary_output, log_entries)
         self.assertIn("Sync complete", log_entries)
 
-    def test_job__disabled_gateway(self):
+    def test_job_disabled_gateway(self):
         """Test job with disabled automation gateway."""
         gateway = AutomationGatewayModel.objects.get(name="IAG10")
         self.job = Job.objects.get(

diff --git a/pyproject.toml b/pyproject.toml
index 7623bfa18..0dc80ee28 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -51,6 +51,7 @@ requests = { version = ">=2.21.0", optional = true }
 requests-oauthlib = { version = ">=1.3.0", optional = true }
 six = { version = ">=1.13.0", optional = true }
 httpx = { version = ">=0.23.3", optional = true }
+# Used by the Itential SSoT as a retry mechanism for HTTP failures in the AutomationGatewayClient.
 retry = "^0.9.2"
 
 [tool.poetry.group.dev.dependencies]
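On the pyproject.toml comment: the retry package exposes a decorator that re-invokes the wrapped callable when a named exception is raised, with configurable tries, delay, and backoff. A hedged sketch of the kind of guard the comment describes; the endpoint path and session handling are illustrative, not the AutomationGatewayClient's actual code:

import requests
from retry import retry


@retry(requests.exceptions.HTTPError, tries=3, delay=1, backoff=2)
def get_devices(session: requests.Session, base_url: str) -> dict:
    """Fetch inventory, retrying transient HTTP failures with exponential backoff."""
    response = session.get(f"{base_url}/devices", timeout=30)
    response.raise_for_status()  # raises HTTPError on 4xx/5xx, which triggers a retry
    return response.json()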
From 49246a279d798519440c64e5d015ac0c324cfb70 Mon Sep 17 00:00:00 2001
From: James Williams
Date: Tue, 18 Jun 2024 10:45:11 -0500
Subject: [PATCH 207/229] magicmock and black

---
 .../itential/diffsync/adapters/itential.py    |    1 -
 .../itential/diffsync/models/base.py          |    1 -
 .../itential/diffsync/models/itential.py      |    1 -
 .../itential/diffsync/models/nautobot.py      |    1 -
 nautobot_ssot/tests/itential/fixtures/base.py |    6 +-
 .../tests/itential/fixtures/clients.py        |    6 +-
 .../tests/itential/fixtures/logger.py         |   26 -
 poetry.lock                                   | 1192 +++++++++--------
 8 files changed, 615 insertions(+), 619 deletions(-)
 delete mode 100644 nautobot_ssot/tests/itential/fixtures/logger.py

diff --git a/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py b/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py
index 6227a46d8..494bb2af5 100644
--- a/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py
+++ b/nautobot_ssot/integrations/itential/diffsync/adapters/itential.py
@@ -1,6 +1,5 @@
 """Itential SSoT adapters."""
 
-
 from diffsync import DiffSync
 
 from nautobot_ssot.integrations.itential.diffsync.models.itential import (

diff --git a/nautobot_ssot/integrations/itential/diffsync/models/base.py b/nautobot_ssot/integrations/itential/diffsync/models/base.py
index 23c60c3a3..8bfcb1081 100644
--- a/nautobot_ssot/integrations/itential/diffsync/models/base.py
+++ b/nautobot_ssot/integrations/itential/diffsync/models/base.py
@@ -1,6 +1,5 @@
 """Itential SSoT shared diffsync models."""
 
-
 from typing import Optional
 
 from diffsync import DiffSyncModel

diff --git a/nautobot_ssot/integrations/itential/diffsync/models/itential.py b/nautobot_ssot/integrations/itential/diffsync/models/itential.py
index 6eec6b691..4eb82393b 100644
--- a/nautobot_ssot/integrations/itential/diffsync/models/itential.py
+++ b/nautobot_ssot/integrations/itential/diffsync/models/itential.py
@@ -1,6 +1,5 @@
 """Itential SSoT models."""
 
-
 from nautobot_ssot.integrations.itential.diffsync.models import base

diff --git a/nautobot_ssot/integrations/itential/diffsync/models/nautobot.py b/nautobot_ssot/integrations/itential/diffsync/models/nautobot.py
index b924e288d..6bfef599d 100644
--- a/nautobot_ssot/integrations/itential/diffsync/models/nautobot.py
+++ b/nautobot_ssot/integrations/itential/diffsync/models/nautobot.py
@@ -1,6 +1,5 @@
 """Itential SSoT Nautobot models."""
 
-
 from nautobot_ssot.integrations.itential.diffsync.models import base

diff --git a/nautobot_ssot/tests/itential/fixtures/base.py b/nautobot_ssot/tests/itential/fixtures/base.py
index 058dcb007..623f854ec 100644
--- a/nautobot_ssot/tests/itential/fixtures/base.py
+++ b/nautobot_ssot/tests/itential/fixtures/base.py
@@ -1,6 +1,7 @@
 """Itential SSoT Base TestCase."""
 
 import os
+import unittest
 import requests_mock
 
 # from unittest import TestCase
@@ -13,7 +14,6 @@
 from nautobot_ssot.integrations.itential.models import AutomationGatewayModel
 from nautobot_ssot.integrations.itential.diffsync.adapters import itential, nautobot
 from nautobot_ssot.tests.itential.fixtures import gateways, urls, clients, devices
-from nautobot_ssot.tests.itential.fixtures.logger import JobLogger
 
 
 class ItentialSSoTBaseTestCase(TestCase):
@@ -21,7 +21,7 @@ class ItentialSSoTBaseTestCase(TestCase):
 
     def setUp(self):
         """Setup test cases."""
-        self.job = JobLogger()
+        self.job = unittest.mock.MagicMock()
         self.requests_mock = requests_mock.Mocker()
         self.requests_mock.start()
@@ -93,7 +93,7 @@ class ItentialSSoTBaseTransactionTestCase(TransactionTestCase):
 
     def setUp(self):
         """Setup test cases."""
-        self.job = JobLogger()
+        self.job = unittest.mock.MagicMock()
         self.requests_mock = requests_mock.Mocker()
         self.requests_mock.start()

diff --git a/nautobot_ssot/tests/itential/fixtures/clients.py b/nautobot_ssot/tests/itential/fixtures/clients.py
index ff2fb96f8..9698590b5 100644
--- a/nautobot_ssot/tests/itential/fixtures/clients.py
+++ b/nautobot_ssot/tests/itential/fixtures/clients.py
@@ -1,14 +1,14 @@
 """Itential SSoT API Clients fixtures."""
 
-from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices
+import unittest
 
-from nautobot_ssot.tests.itential.fixtures import logger
+from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices
 
 from nautobot_ssot.integrations.itential.models import AutomationGatewayModel
 from nautobot_ssot.integrations.itential.clients import AutomationGatewayClient
 
 
-def api_client(device_obj: AutomationGatewayModel, job: object = logger.JobLogger()) -> AutomationGatewayClient:
+def api_client(device_obj: AutomationGatewayModel, job: object = unittest.mock.MagicMock()) -> AutomationGatewayClient:
     """Initialize API Client."""
 
     return AutomationGatewayClient(

diff --git a/nautobot_ssot/tests/itential/fixtures/logger.py b/nautobot_ssot/tests/itential/fixtures/logger.py
deleted file mode 100644
index d4046ed97..000000000
--- a/nautobot_ssot/tests/itential/fixtures/logger.py
+++ /dev/null
@@ -1,26 +0,0 @@
-"""Itential SSoT Job Logger fixtures."""
-
-import logging
-
-
-class Logger:
-    """Logger."""
-
-    def info(self, msg: str):
-        """Info logging."""
-        logging.info(msg)
-
-    def warning(self, msg: str):
-        """Warning logging."""
-        logging.warning(msg)
-
-    def failure(self, msg: str):
-        """Failure logging."""
-        logging.error(msg)
-
-
-class JobLogger:
-    """Job Logger."""
-
-    def __init__(self):
-        self.logger = Logger()
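Swapping the hand-rolled JobLogger fixture for unittest.mock.MagicMock() does more than delete a file: a MagicMock records every attribute access and call, so tests can assert on what was logged instead of just letting log lines print. A small self-contained sketch of that benefit (the message text is illustrative):

import unittest.mock

job = unittest.mock.MagicMock()  # stand-in for a Nautobot job instance
job.logger.warning("IAG10 is not enabled to sync inventory.")

# Every call is recorded, so assertions replace eyeballing log output.
job.logger.warning.assert_called_once_with("IAG10 is not enabled to sync inventory.")

One caveat with the clients.py hunk: the MagicMock() default for job is evaluated once at import time, so every caller that omits job shares a single mock instance. That is usually fine for these fixtures, but it is worth knowing when asserting call counts.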
diff --git a/poetry.lock b/poetry.lock
index f79e8bd3d..4ec9ce2f0 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -163,13 +163,13 @@ files = [
 
 [[package]]
 name = "anyio"
-version = "4.4.0"
+version = "4.3.0"
 description = "High level compatibility layer for multiple asynchronous event loop implementations"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"},
-    {file = "anyio-4.4.0.tar.gz", hash = 
"sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, + {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, + {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, ] [package.dependencies] @@ -213,13 +213,13 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] [[package]] name = "astroid" -version = "3.2.2" +version = "3.1.0" description = "An abstract syntax tree for Python with inference support." optional = false python-versions = ">=3.8.0" files = [ - {file = "astroid-3.2.2-py3-none-any.whl", hash = "sha256:e8a0083b4bb28fcffb6207a3bfc9e5d0a68be951dd7e336d5dcf639c682388c0"}, - {file = "astroid-3.2.2.tar.gz", hash = "sha256:8ead48e31b92b2e217b6c9733a21afafe479d52d6e164dd25fb1a770c7c3cf94"}, + {file = "astroid-3.1.0-py3-none-any.whl", hash = "sha256:951798f922990137ac090c53af473db7ab4e70c770e6d7fae0cec59f74411819"}, + {file = "astroid-3.1.0.tar.gz", hash = "sha256:ac248253bfa4bd924a0de213707e7ebeeb3138abeb48d798784ead1e56d419d4"}, ] [package.dependencies] @@ -303,13 +303,13 @@ tomli = "*" [[package]] name = "babel" -version = "2.15.0" +version = "2.14.0" description = "Internationalization utilities" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"}, - {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"}, + {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, + {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, ] [package.dependencies] @@ -362,13 +362,13 @@ tzdata = ["tzdata"] [[package]] name = "bandit" -version = "1.7.9" +version = "1.7.8" description = "Security oriented static analyser for python code." optional = false python-versions = ">=3.8" files = [ - {file = "bandit-1.7.9-py3-none-any.whl", hash = "sha256:52077cb339000f337fb25f7e045995c4ad01511e716e5daac37014b9752de8ec"}, - {file = "bandit-1.7.9.tar.gz", hash = "sha256:7c395a436743018f7be0a4cbb0a4ea9b902b6d87264ddecf8cfdc73b4f78ff61"}, + {file = "bandit-1.7.8-py3-none-any.whl", hash = "sha256:509f7af645bc0cd8fd4587abc1a038fc795636671ee8204d502b933aee44f381"}, + {file = "bandit-1.7.8.tar.gz", hash = "sha256:36de50f720856ab24a24dbaa5fee2c66050ed97c1477e0a1159deab1775eab6b"}, ] [package.dependencies] @@ -397,33 +397,33 @@ files = [ [[package]] name = "black" -version = "24.4.2" +version = "24.4.0" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, - {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, - {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, - {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, - {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, - {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, - {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, - {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, - {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, - {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, - {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, - {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, - {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, - {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, - {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, - {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, - {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, - {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, - {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, - {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, - {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, - {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, + {file = "black-24.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6ad001a9ddd9b8dfd1b434d566be39b1cd502802c8d38bbb1ba612afda2ef436"}, + {file = "black-24.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3a3a092b8b756c643fe45f4624dbd5a389f770a4ac294cf4d0fce6af86addaf"}, + {file = 
"black-24.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dae79397f367ac8d7adb6c779813328f6d690943f64b32983e896bcccd18cbad"}, + {file = "black-24.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:71d998b73c957444fb7c52096c3843875f4b6b47a54972598741fe9a7f737fcb"}, + {file = "black-24.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8e5537f456a22cf5cfcb2707803431d2feeb82ab3748ade280d6ccd0b40ed2e8"}, + {file = "black-24.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64e60a7edd71fd542a10a9643bf369bfd2644de95ec71e86790b063aa02ff745"}, + {file = "black-24.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cd5b4f76056cecce3e69b0d4c228326d2595f506797f40b9233424e2524c070"}, + {file = "black-24.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:64578cf99b6b46a6301bc28bdb89f9d6f9b592b1c5837818a177c98525dbe397"}, + {file = "black-24.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f95cece33329dc4aa3b0e1a771c41075812e46cf3d6e3f1dfe3d91ff09826ed2"}, + {file = "black-24.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4396ca365a4310beef84d446ca5016f671b10f07abdba3e4e4304218d2c71d33"}, + {file = "black-24.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44d99dfdf37a2a00a6f7a8dcbd19edf361d056ee51093b2445de7ca09adac965"}, + {file = "black-24.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:21f9407063ec71c5580b8ad975653c66508d6a9f57bd008bb8691d273705adcd"}, + {file = "black-24.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:652e55bb722ca026299eb74e53880ee2315b181dfdd44dca98e43448620ddec1"}, + {file = "black-24.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7f2966b9b2b3b7104fca9d75b2ee856fe3fdd7ed9e47c753a4bb1a675f2caab8"}, + {file = "black-24.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bb9ca06e556a09f7f7177bc7cb604e5ed2d2df1e9119e4f7d2f1f7071c32e5d"}, + {file = "black-24.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:d4e71cdebdc8efeb6deaf5f2deb28325f8614d48426bed118ecc2dcaefb9ebf3"}, + {file = "black-24.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6644f97a7ef6f401a150cca551a1ff97e03c25d8519ee0bbc9b0058772882665"}, + {file = "black-24.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:75a2d0b4f5eb81f7eebc31f788f9830a6ce10a68c91fbe0fade34fff7a2836e6"}, + {file = "black-24.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb949f56a63c5e134dfdca12091e98ffb5fd446293ebae123d10fc1abad00b9e"}, + {file = "black-24.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:7852b05d02b5b9a8c893ab95863ef8986e4dda29af80bbbda94d7aee1abf8702"}, + {file = "black-24.4.0-py3-none-any.whl", hash = "sha256:74eb9b5420e26b42c00a3ff470dc0cd144b80a766128b1771d07643165e08d0e"}, + {file = "black-24.4.0.tar.gz", hash = "sha256:f07b69fda20578367eaebbd670ff8fc653ab181e1ff95d84497f9fa20e7d0641"}, ] [package.dependencies] @@ -499,13 +499,13 @@ zstd = ["zstandard (==0.22.0)"] [[package]] name = "certifi" -version = "2024.6.2" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, - {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -753,28 +753,28 @@ testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] [[package]] name = "cloudvision" -version = "1.20.0" +version = "1.19.1" description = "A Python library for Arista's CloudVision APIs and Provisioning Action integrations." optional = true python-versions = ">=3.7.0" files = [ - {file = "cloudvision-1.20.0-py3-none-any.whl", hash = "sha256:ccdc8116184d808226a1cbb8c678a6e4f338d67537d469c41231e37cb31aa00f"}, - {file = "cloudvision-1.20.0.tar.gz", hash = "sha256:33760b93aa1d4267acd195432cbb5b6228e6c00d965b9127d6d7ec276c70badf"}, + {file = "cloudvision-1.19.1-py3-none-any.whl", hash = "sha256:76249cd7054f515f0e20084489beecccfa83d48563b5db11a6f2493ed6e54a52"}, + {file = "cloudvision-1.19.1.tar.gz", hash = "sha256:f865cc7a5e324fbd6259454f5e2b056c381566349e797e0ddce2131b52d971e1"}, ] [package.dependencies] cryptography = ">=42.0.4,<43.0.0" -grpcio = ">=1.53.0" +grpcio = ">=1.46.0" msgpack = ">=1.0.3" -protobuf = ">=4.22.5,<5.0" +protobuf = ">=3.20.1,<4.0" requests = ">=2.20.1" -types-protobuf = ">=3.20.4.6,<4.0" +types-protobuf = ">=3.20.1,<4.0" types-PyYAML = ">=6.0.7" types-requests = ">=2.27.25" typing-extensions = ">=4.2.0" [package.extras] -dev = ["black (==24.3.0)", "flake8 (==3.8.4)", "grpcio-tools (>=1.53.2)", "isort (==5.11.4)", "mypy (==0.950)", "mypy-protobuf (==3.2.0)", "numpy (==1.26.4)", "pytest (==7.1.2)", "pyyaml (==6.0.1)", "twine (==4.0.1)", "types-attrs (>=19.1.0)", "wheel (==0.38.4)"] +dev = ["black (==24.3.0)", "flake8 (==3.8.4)", "grpcio-tools (==1.46.0)", "isort (==5.11.4)", "mypy (==0.950)", "mypy-protobuf (==3.2.0)", "numpy (==1.26.4)", "pytest (==7.1.2)", "pyyaml (==6.0.1)", "twine (==4.0.1)", "types-attrs (>=19.1.0)", "wheel (==0.38.4)"] [[package]] name = "colorama" @@ -874,63 +874,63 @@ test-no-images = ["pytest", "pytest-cov", "wurlitzer"] [[package]] name = "coverage" -version = "7.5.3" +version = "7.4.4" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a6519d917abb15e12380406d721e37613e2a67d166f9fb7e5a8ce0375744cd45"}, - {file = "coverage-7.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aea7da970f1feccf48be7335f8b2ca64baf9b589d79e05b9397a06696ce1a1ec"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:923b7b1c717bd0f0f92d862d1ff51d9b2b55dbbd133e05680204465f454bb286"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62bda40da1e68898186f274f832ef3e759ce929da9a9fd9fcf265956de269dbc"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8b7339180d00de83e930358223c617cc343dd08e1aa5ec7b06c3a121aec4e1d"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:25a5caf742c6195e08002d3b6c2dd6947e50efc5fc2c2205f61ecb47592d2d83"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:05ac5f60faa0c704c0f7e6a5cbfd6f02101ed05e0aee4d2822637a9e672c998d"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:239a4e75e09c2b12ea478d28815acf83334d32e722e7433471fbf641c606344c"}, - {file = "coverage-7.5.3-cp310-cp310-win32.whl", hash = "sha256:a5812840d1d00eafae6585aba38021f90a705a25b8216ec7f66aebe5b619fb84"}, - {file = "coverage-7.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:33ca90a0eb29225f195e30684ba4a6db05dbef03c2ccd50b9077714c48153cac"}, - {file = "coverage-7.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f81bc26d609bf0fbc622c7122ba6307993c83c795d2d6f6f6fd8c000a770d974"}, - {file = "coverage-7.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cec2af81f9e7569280822be68bd57e51b86d42e59ea30d10ebdbb22d2cb7232"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55f689f846661e3f26efa535071775d0483388a1ccfab899df72924805e9e7cd"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50084d3516aa263791198913a17354bd1dc627d3c1639209640b9cac3fef5807"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:341dd8f61c26337c37988345ca5c8ccabeff33093a26953a1ac72e7d0103c4fb"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ab0b028165eea880af12f66086694768f2c3139b2c31ad5e032c8edbafca6ffc"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5bc5a8c87714b0c67cfeb4c7caa82b2d71e8864d1a46aa990b5588fa953673b8"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38a3b98dae8a7c9057bd91fbf3415c05e700a5114c5f1b5b0ea5f8f429ba6614"}, - {file = "coverage-7.5.3-cp311-cp311-win32.whl", hash = "sha256:fcf7d1d6f5da887ca04302db8e0e0cf56ce9a5e05f202720e49b3e8157ddb9a9"}, - {file = "coverage-7.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:8c836309931839cca658a78a888dab9676b5c988d0dd34ca247f5f3e679f4e7a"}, - {file = "coverage-7.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:296a7d9bbc598e8744c00f7a6cecf1da9b30ae9ad51c566291ff1314e6cbbed8"}, - {file = "coverage-7.5.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:34d6d21d8795a97b14d503dcaf74226ae51eb1f2bd41015d3ef332a24d0a17b3"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e317953bb4c074c06c798a11dbdd2cf9979dbcaa8ccc0fa4701d80042d4ebf1"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:705f3d7c2b098c40f5b81790a5fedb274113373d4d1a69e65f8b68b0cc26f6db"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1196e13c45e327d6cd0b6e471530a1882f1017eb83c6229fc613cd1a11b53cd"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:015eddc5ccd5364dcb902eaecf9515636806fa1e0d5bef5769d06d0f31b54523"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fd27d8b49e574e50caa65196d908f80e4dff64d7e592d0c59788b45aad7e8b35"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:33fc65740267222fc02975c061eb7167185fef4cc8f2770267ee8bf7d6a42f84"}, - {file = 
"coverage-7.5.3-cp312-cp312-win32.whl", hash = "sha256:7b2a19e13dfb5c8e145c7a6ea959485ee8e2204699903c88c7d25283584bfc08"}, - {file = "coverage-7.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:0bbddc54bbacfc09b3edaec644d4ac90c08ee8ed4844b0f86227dcda2d428fcb"}, - {file = "coverage-7.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f78300789a708ac1f17e134593f577407d52d0417305435b134805c4fb135adb"}, - {file = "coverage-7.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b368e1aee1b9b75757942d44d7598dcd22a9dbb126affcbba82d15917f0cc155"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f836c174c3a7f639bded48ec913f348c4761cbf49de4a20a956d3431a7c9cb24"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:244f509f126dc71369393ce5fea17c0592c40ee44e607b6d855e9c4ac57aac98"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4c2872b3c91f9baa836147ca33650dc5c172e9273c808c3c3199c75490e709d"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dd4b3355b01273a56b20c219e74e7549e14370b31a4ffe42706a8cda91f19f6d"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f542287b1489c7a860d43a7d8883e27ca62ab84ca53c965d11dac1d3a1fab7ce"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:75e3f4e86804023e991096b29e147e635f5e2568f77883a1e6eed74512659ab0"}, - {file = "coverage-7.5.3-cp38-cp38-win32.whl", hash = "sha256:c59d2ad092dc0551d9f79d9d44d005c945ba95832a6798f98f9216ede3d5f485"}, - {file = "coverage-7.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:fa21a04112c59ad54f69d80e376f7f9d0f5f9123ab87ecd18fbb9ec3a2beed56"}, - {file = "coverage-7.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5102a92855d518b0996eb197772f5ac2a527c0ec617124ad5242a3af5e25f85"}, - {file = "coverage-7.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1da0a2e3b37b745a2b2a678a4c796462cf753aebf94edcc87dcc6b8641eae31"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8383a6c8cefba1b7cecc0149415046b6fc38836295bc4c84e820872eb5478b3d"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aad68c3f2566dfae84bf46295a79e79d904e1c21ccfc66de88cd446f8686341"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e079c9ec772fedbade9d7ebc36202a1d9ef7291bc9b3a024ca395c4d52853d7"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bde997cac85fcac227b27d4fb2c7608a2c5f6558469b0eb704c5726ae49e1c52"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:990fb20b32990b2ce2c5f974c3e738c9358b2735bc05075d50a6f36721b8f303"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3d5a67f0da401e105753d474369ab034c7bae51a4c31c77d94030d59e41df5bd"}, - {file = "coverage-7.5.3-cp39-cp39-win32.whl", hash = "sha256:e08c470c2eb01977d221fd87495b44867a56d4d594f43739a8028f8646a51e0d"}, - {file = "coverage-7.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:1d2a830ade66d3563bb61d1e3c77c8def97b30ed91e166c67d0632c018f380f0"}, - {file = "coverage-7.5.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:3538d8fb1ee9bdd2e2692b3b18c22bb1c19ffbefd06880f5ac496e42d7bb3884"}, - {file = 
"coverage-7.5.3.tar.gz", hash = "sha256:04aefca5190d1dc7a53a4c1a5a7f8568811306d7a8ee231c42fb69215571944f"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, + {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, + {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, + {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, + {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, + {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, + {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, + {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, + {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, + {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, + {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, + {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, + {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, ] [package.extras] @@ -952,43 +952,43 @@ dev = ["polib"] [[package]] name = "cryptography" -version = "42.0.8" +version = "42.0.5" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, - {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, - {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, - {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, - {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, - {file = 
"cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, - {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, - {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, - {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, 
+ {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = 
"cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, ] [package.dependencies] @@ -1006,16 +1006,15 @@ test-randomorder = ["pytest-randomly"] [[package]] name = "cvprac" -version = "1.4.0" +version = "1.3.2" description = "Arista Cloudvision(R) Portal Rest API Client written in python" optional = true python-versions = "*" files = [ - {file = "cvprac-1.4.0.tar.gz", hash = "sha256:97506caa0b2d543c33011f72b9f755a6f170f4a75dc523307c294b22cd2e7236"}, + {file = "cvprac-1.3.2.tar.gz", hash = "sha256:5f0e5249c7e73d3c6f10a698be57f50382073773e6c81d74640fcc4976b602e5"}, ] [package.dependencies] -packaging = ">=23.2" requests = {version = ">=2.27.0", extras = ["socks"]} [package.extras] @@ -1269,13 +1268,13 @@ Django = ">=3.2" [[package]] name = "django-health-check" -version = "3.18.2" +version = "3.18.1" description = "Run checks on services like databases, queue servers, celery processes, etc." optional = false python-versions = ">=3.8" files = [ - {file = "django_health_check-3.18.2-py2.py3-none-any.whl", hash = "sha256:16f9c9186236cbc2858fa0d0ecc3566ba2ad2b72683e5678d0d58eb9e8bbba1a"}, - {file = "django_health_check-3.18.2.tar.gz", hash = "sha256:21235120f8d756fa75ba430d0b0dbb04620fbd7bfac92ed6a0b911915ba38918"}, + {file = "django-health-check-3.18.1.tar.gz", hash = "sha256:44552d55ae8950c9548d3b90f9d9fd5570b57446a19b2a8e674c82f993cb7a2c"}, + {file = "django_health_check-3.18.1-py2.py3-none-any.whl", hash = "sha256:2c89a326cd79830e2fc6808823a9e7e874ab23f7aef3ff2c4d1194c998e1dca1"}, ] [package.dependencies] @@ -1283,7 +1282,7 @@ django = ">=2.2" [package.extras] docs = ["sphinx"] -test = ["boto3", "celery", "django-storages", "pytest", "pytest-cov", "pytest-django", "redis"] +test = ["celery", "pytest", "pytest-cov", "pytest-django", "redis"] [[package]] name = "django-jinja" @@ -1534,13 +1533,13 @@ sidecar = ["drf-spectacular-sidecar"] [[package]] name = "drf-spectacular-sidecar" -version = "2024.6.1" +version = "2024.4.1" description = "Serve self-contained distribution builds of Swagger UI and Redoc with Django" optional = false python-versions = ">=3.6" files = [ - {file = "drf_spectacular_sidecar-2024.6.1-py3-none-any.whl", hash = "sha256:5ad678c788dcb36697a668884c6fdac2c511a4094cb010978bd01a6345197bbb"}, - {file = "drf_spectacular_sidecar-2024.6.1.tar.gz", hash = "sha256:eed744c26d2caff815fd67d89eca685f645479f07fb86c124d8ee26a13b1d960"}, + {file = "drf-spectacular-sidecar-2024.4.1.tar.gz", hash = "sha256:68532dd094714f79c1775c00848f22c10f004826abc856442ff30c3bc9c40bb4"}, + {file = "drf_spectacular_sidecar-2024.4.1-py3-none-any.whl", hash = "sha256:8359befe69a8953fea86be01c1ff37038854a62546225551de16c47c07dccd4e"}, ] [package.dependencies] @@ -1606,53 +1605,53 @@ pyflakes = ">=2.5.0,<2.6.0" [[package]] name = "fonttools" -version = "4.53.0" +version = "4.51.0" description = "Tools to manipulate font files" optional = true 
python-versions = ">=3.8" files = [ - {file = "fonttools-4.53.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:52a6e0a7a0bf611c19bc8ec8f7592bdae79c8296c70eb05917fd831354699b20"}, - {file = "fonttools-4.53.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:099634631b9dd271d4a835d2b2a9e042ccc94ecdf7e2dd9f7f34f7daf333358d"}, - {file = "fonttools-4.53.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e40013572bfb843d6794a3ce076c29ef4efd15937ab833f520117f8eccc84fd6"}, - {file = "fonttools-4.53.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:715b41c3e231f7334cbe79dfc698213dcb7211520ec7a3bc2ba20c8515e8a3b5"}, - {file = "fonttools-4.53.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:74ae2441731a05b44d5988d3ac2cf784d3ee0a535dbed257cbfff4be8bb49eb9"}, - {file = "fonttools-4.53.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:95db0c6581a54b47c30860d013977b8a14febc206c8b5ff562f9fe32738a8aca"}, - {file = "fonttools-4.53.0-cp310-cp310-win32.whl", hash = "sha256:9cd7a6beec6495d1dffb1033d50a3f82dfece23e9eb3c20cd3c2444d27514068"}, - {file = "fonttools-4.53.0-cp310-cp310-win_amd64.whl", hash = "sha256:daaef7390e632283051e3cf3e16aff2b68b247e99aea916f64e578c0449c9c68"}, - {file = "fonttools-4.53.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a209d2e624ba492df4f3bfad5996d1f76f03069c6133c60cd04f9a9e715595ec"}, - {file = "fonttools-4.53.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f520d9ac5b938e6494f58a25c77564beca7d0199ecf726e1bd3d56872c59749"}, - {file = "fonttools-4.53.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eceef49f457253000e6a2d0f7bd08ff4e9fe96ec4ffce2dbcb32e34d9c1b8161"}, - {file = "fonttools-4.53.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa1f3e34373aa16045484b4d9d352d4c6b5f9f77ac77a178252ccbc851e8b2ee"}, - {file = "fonttools-4.53.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:28d072169fe8275fb1a0d35e3233f6df36a7e8474e56cb790a7258ad822b6fd6"}, - {file = "fonttools-4.53.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4a2a6ba400d386e904fd05db81f73bee0008af37799a7586deaa4aef8cd5971e"}, - {file = "fonttools-4.53.0-cp311-cp311-win32.whl", hash = "sha256:bb7273789f69b565d88e97e9e1da602b4ee7ba733caf35a6c2affd4334d4f005"}, - {file = "fonttools-4.53.0-cp311-cp311-win_amd64.whl", hash = "sha256:9fe9096a60113e1d755e9e6bda15ef7e03391ee0554d22829aa506cdf946f796"}, - {file = "fonttools-4.53.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d8f191a17369bd53a5557a5ee4bab91d5330ca3aefcdf17fab9a497b0e7cff7a"}, - {file = "fonttools-4.53.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:93156dd7f90ae0a1b0e8871032a07ef3178f553f0c70c386025a808f3a63b1f4"}, - {file = "fonttools-4.53.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bff98816cb144fb7b85e4b5ba3888a33b56ecef075b0e95b95bcd0a5fbf20f06"}, - {file = "fonttools-4.53.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:973d030180eca8255b1bce6ffc09ef38a05dcec0e8320cc9b7bcaa65346f341d"}, - {file = "fonttools-4.53.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c4ee5a24e281fbd8261c6ab29faa7fd9a87a12e8c0eed485b705236c65999109"}, - {file = "fonttools-4.53.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd5bc124fae781a4422f61b98d1d7faa47985f663a64770b78f13d2c072410c2"}, - {file = "fonttools-4.53.0-cp312-cp312-win32.whl", hash = 
"sha256:a239afa1126b6a619130909c8404070e2b473dd2b7fc4aacacd2e763f8597fea"}, - {file = "fonttools-4.53.0-cp312-cp312-win_amd64.whl", hash = "sha256:45b4afb069039f0366a43a5d454bc54eea942bfb66b3fc3e9a2c07ef4d617380"}, - {file = "fonttools-4.53.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:93bc9e5aaa06ff928d751dc6be889ff3e7d2aa393ab873bc7f6396a99f6fbb12"}, - {file = "fonttools-4.53.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2367d47816cc9783a28645bc1dac07f8ffc93e0f015e8c9fc674a5b76a6da6e4"}, - {file = "fonttools-4.53.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:907fa0b662dd8fc1d7c661b90782ce81afb510fc4b7aa6ae7304d6c094b27bce"}, - {file = "fonttools-4.53.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e0ad3c6ea4bd6a289d958a1eb922767233f00982cf0fe42b177657c86c80a8f"}, - {file = "fonttools-4.53.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:73121a9b7ff93ada888aaee3985a88495489cc027894458cb1a736660bdfb206"}, - {file = "fonttools-4.53.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ee595d7ba9bba130b2bec555a40aafa60c26ce68ed0cf509983e0f12d88674fd"}, - {file = "fonttools-4.53.0-cp38-cp38-win32.whl", hash = "sha256:fca66d9ff2ac89b03f5aa17e0b21a97c21f3491c46b583bb131eb32c7bab33af"}, - {file = "fonttools-4.53.0-cp38-cp38-win_amd64.whl", hash = "sha256:31f0e3147375002aae30696dd1dc596636abbd22fca09d2e730ecde0baad1d6b"}, - {file = "fonttools-4.53.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7d6166192dcd925c78a91d599b48960e0a46fe565391c79fe6de481ac44d20ac"}, - {file = "fonttools-4.53.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef50ec31649fbc3acf6afd261ed89d09eb909b97cc289d80476166df8438524d"}, - {file = "fonttools-4.53.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f193f060391a455920d61684a70017ef5284ccbe6023bb056e15e5ac3de11d1"}, - {file = "fonttools-4.53.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba9f09ff17f947392a855e3455a846f9855f6cf6bec33e9a427d3c1d254c712f"}, - {file = "fonttools-4.53.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0c555e039d268445172b909b1b6bdcba42ada1cf4a60e367d68702e3f87e5f64"}, - {file = "fonttools-4.53.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5a4788036201c908079e89ae3f5399b33bf45b9ea4514913f4dbbe4fac08efe0"}, - {file = "fonttools-4.53.0-cp39-cp39-win32.whl", hash = "sha256:d1a24f51a3305362b94681120c508758a88f207fa0a681c16b5a4172e9e6c7a9"}, - {file = "fonttools-4.53.0-cp39-cp39-win_amd64.whl", hash = "sha256:1e677bfb2b4bd0e5e99e0f7283e65e47a9814b0486cb64a41adf9ef110e078f2"}, - {file = "fonttools-4.53.0-py3-none-any.whl", hash = "sha256:6b4f04b1fbc01a3569d63359f2227c89ab294550de277fd09d8fca6185669fa4"}, - {file = "fonttools-4.53.0.tar.gz", hash = "sha256:c93ed66d32de1559b6fc348838c7572d5c0ac1e4a258e76763a5caddd8944002"}, + {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:84d7751f4468dd8cdd03ddada18b8b0857a5beec80bce9f435742abc9a851a74"}, + {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8b4850fa2ef2cfbc1d1f689bc159ef0f45d8d83298c1425838095bf53ef46308"}, + {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5b48a1121117047d82695d276c2af2ee3a24ffe0f502ed581acc2673ecf1037"}, + {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:180194c7fe60c989bb627d7ed5011f2bef1c4d36ecf3ec64daec8302f1ae0716"}, + {file = 
"fonttools-4.51.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:96a48e137c36be55e68845fc4284533bda2980f8d6f835e26bca79d7e2006438"}, + {file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:806e7912c32a657fa39d2d6eb1d3012d35f841387c8fc6cf349ed70b7c340039"}, + {file = "fonttools-4.51.0-cp310-cp310-win32.whl", hash = "sha256:32b17504696f605e9e960647c5f64b35704782a502cc26a37b800b4d69ff3c77"}, + {file = "fonttools-4.51.0-cp310-cp310-win_amd64.whl", hash = "sha256:c7e91abdfae1b5c9e3a543f48ce96013f9a08c6c9668f1e6be0beabf0a569c1b"}, + {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a8feca65bab31479d795b0d16c9a9852902e3a3c0630678efb0b2b7941ea9c74"}, + {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ac27f436e8af7779f0bb4d5425aa3535270494d3bc5459ed27de3f03151e4c2"}, + {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e19bd9e9964a09cd2433a4b100ca7f34e34731e0758e13ba9a1ed6e5468cc0f"}, + {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2b92381f37b39ba2fc98c3a45a9d6383bfc9916a87d66ccb6553f7bdd129097"}, + {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5f6bc991d1610f5c3bbe997b0233cbc234b8e82fa99fc0b2932dc1ca5e5afec0"}, + {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9696fe9f3f0c32e9a321d5268208a7cc9205a52f99b89479d1b035ed54c923f1"}, + {file = "fonttools-4.51.0-cp311-cp311-win32.whl", hash = "sha256:3bee3f3bd9fa1d5ee616ccfd13b27ca605c2b4270e45715bd2883e9504735034"}, + {file = "fonttools-4.51.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f08c901d3866a8905363619e3741c33f0a83a680d92a9f0e575985c2634fcc1"}, + {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4060acc2bfa2d8e98117828a238889f13b6f69d59f4f2d5857eece5277b829ba"}, + {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1250e818b5f8a679ad79660855528120a8f0288f8f30ec88b83db51515411fcc"}, + {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76f1777d8b3386479ffb4a282e74318e730014d86ce60f016908d9801af9ca2a"}, + {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b5ad456813d93b9c4b7ee55302208db2b45324315129d85275c01f5cb7e61a2"}, + {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:68b3fb7775a923be73e739f92f7e8a72725fd333eab24834041365d2278c3671"}, + {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8e2f1a4499e3b5ee82c19b5ee57f0294673125c65b0a1ff3764ea1f9db2f9ef5"}, + {file = "fonttools-4.51.0-cp312-cp312-win32.whl", hash = "sha256:278e50f6b003c6aed19bae2242b364e575bcb16304b53f2b64f6551b9c000e15"}, + {file = "fonttools-4.51.0-cp312-cp312-win_amd64.whl", hash = "sha256:b3c61423f22165541b9403ee39874dcae84cd57a9078b82e1dce8cb06b07fa2e"}, + {file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1621ee57da887c17312acc4b0e7ac30d3a4fb0fec6174b2e3754a74c26bbed1e"}, + {file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d9298be7a05bb4801f558522adbe2feea1b0b103d5294ebf24a92dd49b78e5"}, + {file = "fonttools-4.51.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee1af4be1c5afe4c96ca23badd368d8dc75f611887fb0c0dac9f71ee5d6f110e"}, + {file = 
"fonttools-4.51.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c18b49adc721a7d0b8dfe7c3130c89b8704baf599fb396396d07d4aa69b824a1"}, + {file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de7c29bdbdd35811f14493ffd2534b88f0ce1b9065316433b22d63ca1cd21f14"}, + {file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cadf4e12a608ef1d13e039864f484c8a968840afa0258b0b843a0556497ea9ed"}, + {file = "fonttools-4.51.0-cp38-cp38-win32.whl", hash = "sha256:aefa011207ed36cd280babfaa8510b8176f1a77261833e895a9d96e57e44802f"}, + {file = "fonttools-4.51.0-cp38-cp38-win_amd64.whl", hash = "sha256:865a58b6e60b0938874af0968cd0553bcd88e0b2cb6e588727117bd099eef836"}, + {file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:60a3409c9112aec02d5fb546f557bca6efa773dcb32ac147c6baf5f742e6258b"}, + {file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7e89853d8bea103c8e3514b9f9dc86b5b4120afb4583b57eb10dfa5afbe0936"}, + {file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56fc244f2585d6c00b9bcc59e6593e646cf095a96fe68d62cd4da53dd1287b55"}, + {file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d145976194a5242fdd22df18a1b451481a88071feadf251221af110ca8f00ce"}, + {file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5b8cab0c137ca229433570151b5c1fc6af212680b58b15abd797dcdd9dd5051"}, + {file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:54dcf21a2f2d06ded676e3c3f9f74b2bafded3a8ff12f0983160b13e9f2fb4a7"}, + {file = "fonttools-4.51.0-cp39-cp39-win32.whl", hash = "sha256:0118ef998a0699a96c7b28457f15546815015a2710a1b23a7bf6c1be60c01636"}, + {file = "fonttools-4.51.0-cp39-cp39-win_amd64.whl", hash = "sha256:599bdb75e220241cedc6faebfafedd7670335d2e29620d207dd0378a4e9ccc5a"}, + {file = "fonttools-4.51.0-py3-none-any.whl", hash = "sha256:15c94eeef6b095831067f72c825eb0e2d48bb4cea0647c1b05c981ecba2bf39f"}, + {file = "fonttools-4.51.0.tar.gz", hash = "sha256:dc0673361331566d7a663d7ce0f6fdcbfbdc1f59c6e3ed1165ad7202ca183c68"}, ] [package.extras] @@ -1817,13 +1816,13 @@ test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", [[package]] name = "gprof2dot" -version = "2024.6.6" +version = "2022.7.29" description = "Generate a dot graph from the output of several profilers." optional = false -python-versions = ">=3.8" +python-versions = ">=2.7" files = [ - {file = "gprof2dot-2024.6.6-py2.py3-none-any.whl", hash = "sha256:45b14ad7ce64e299c8f526881007b9eb2c6b75505d5613e96e66ee4d5ab33696"}, - {file = "gprof2dot-2024.6.6.tar.gz", hash = "sha256:fa1420c60025a9eb7734f65225b4da02a10fc6dd741b37fa129bc6b41951e5ab"}, + {file = "gprof2dot-2022.7.29-py2.py3-none-any.whl", hash = "sha256:f165b3851d3c52ee4915eb1bd6cca571e5759823c2cd0f71a79bda93c2dc85d6"}, + {file = "gprof2dot-2022.7.29.tar.gz", hash = "sha256:45b4d298bd36608fccf9511c3fd88a773f7a1abc04d6cd39445b11ba43133ec5"}, ] [[package]] @@ -1920,13 +1919,13 @@ six = ">=1.12" [[package]] name = "griffe" -version = "0.45.3" +version = "0.44.0" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." 
optional = false python-versions = ">=3.8" files = [ - {file = "griffe-0.45.3-py3-none-any.whl", hash = "sha256:ed1481a680ae3e28f91a06e0d8a51a5c9b97555aa2527abc2664447cc22337d6"}, - {file = "griffe-0.45.3.tar.gz", hash = "sha256:02ee71cc1a5035864b97bd0dbfff65c33f6f2c8854d3bd48a791905c2b8a44b9"}, + {file = "griffe-0.44.0-py3-none-any.whl", hash = "sha256:8a4471c469ba980b87c843f1168850ce39d0c1d0c7be140dca2480f76c8e5446"}, + {file = "griffe-0.44.0.tar.gz", hash = "sha256:34aee1571042f9bf00529bc715de4516fb6f482b164e90d030300601009e0223"}, ] [package.dependencies] @@ -1935,61 +1934,69 @@ colorama = ">=0.4" [[package]] name = "grpcio" -version = "1.64.1" +version = "1.62.2" description = "HTTP/2-based RPC framework" optional = true -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "grpcio-1.64.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:55697ecec192bc3f2f3cc13a295ab670f51de29884ca9ae6cd6247df55df2502"}, - {file = "grpcio-1.64.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:3b64ae304c175671efdaa7ec9ae2cc36996b681eb63ca39c464958396697daff"}, - {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:bac71b4b28bc9af61efcdc7630b166440bbfbaa80940c9a697271b5e1dabbc61"}, - {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c024ffc22d6dc59000faf8ad781696d81e8e38f4078cb0f2630b4a3cf231a90"}, - {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7cd5c1325f6808b8ae31657d281aadb2a51ac11ab081ae335f4f7fc44c1721d"}, - {file = "grpcio-1.64.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0a2813093ddb27418a4c99f9b1c223fab0b053157176a64cc9db0f4557b69bd9"}, - {file = "grpcio-1.64.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2981c7365a9353f9b5c864595c510c983251b1ab403e05b1ccc70a3d9541a73b"}, - {file = "grpcio-1.64.1-cp310-cp310-win32.whl", hash = "sha256:1262402af5a511c245c3ae918167eca57342c72320dffae5d9b51840c4b2f86d"}, - {file = "grpcio-1.64.1-cp310-cp310-win_amd64.whl", hash = "sha256:19264fc964576ddb065368cae953f8d0514ecc6cb3da8903766d9fb9d4554c33"}, - {file = "grpcio-1.64.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:58b1041e7c870bb30ee41d3090cbd6f0851f30ae4eb68228955d973d3efa2e61"}, - {file = "grpcio-1.64.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bbc5b1d78a7822b0a84c6f8917faa986c1a744e65d762ef6d8be9d75677af2ca"}, - {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:5841dd1f284bd1b3d8a6eca3a7f062b06f1eec09b184397e1d1d43447e89a7ae"}, - {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8caee47e970b92b3dd948371230fcceb80d3f2277b3bf7fbd7c0564e7d39068e"}, - {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73819689c169417a4f978e562d24f2def2be75739c4bed1992435d007819da1b"}, - {file = "grpcio-1.64.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6503b64c8b2dfad299749cad1b595c650c91e5b2c8a1b775380fcf8d2cbba1e9"}, - {file = "grpcio-1.64.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1de403fc1305fd96cfa75e83be3dee8538f2413a6b1685b8452301c7ba33c294"}, - {file = "grpcio-1.64.1-cp311-cp311-win32.whl", hash = "sha256:d4d29cc612e1332237877dfa7fe687157973aab1d63bd0f84cf06692f04c0367"}, - {file = "grpcio-1.64.1-cp311-cp311-win_amd64.whl", hash = "sha256:5e56462b05a6f860b72f0fa50dca06d5b26543a4e88d0396259a07dc30f4e5aa"}, - {file = "grpcio-1.64.1-cp312-cp312-linux_armv7l.whl", hash = 
"sha256:4657d24c8063e6095f850b68f2d1ba3b39f2b287a38242dcabc166453e950c59"}, - {file = "grpcio-1.64.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:62b4e6eb7bf901719fce0ca83e3ed474ae5022bb3827b0a501e056458c51c0a1"}, - {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:ee73a2f5ca4ba44fa33b4d7d2c71e2c8a9e9f78d53f6507ad68e7d2ad5f64a22"}, - {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:198908f9b22e2672a998870355e226a725aeab327ac4e6ff3a1399792ece4762"}, - {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39b9d0acaa8d835a6566c640f48b50054f422d03e77e49716d4c4e8e279665a1"}, - {file = "grpcio-1.64.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5e42634a989c3aa6049f132266faf6b949ec2a6f7d302dbb5c15395b77d757eb"}, - {file = "grpcio-1.64.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b1a82e0b9b3022799c336e1fc0f6210adc019ae84efb7321d668129d28ee1efb"}, - {file = "grpcio-1.64.1-cp312-cp312-win32.whl", hash = "sha256:55260032b95c49bee69a423c2f5365baa9369d2f7d233e933564d8a47b893027"}, - {file = "grpcio-1.64.1-cp312-cp312-win_amd64.whl", hash = "sha256:c1a786ac592b47573a5bb7e35665c08064a5d77ab88a076eec11f8ae86b3e3f6"}, - {file = "grpcio-1.64.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:a011ac6c03cfe162ff2b727bcb530567826cec85eb8d4ad2bfb4bd023287a52d"}, - {file = "grpcio-1.64.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4d6dab6124225496010bd22690f2d9bd35c7cbb267b3f14e7a3eb05c911325d4"}, - {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:a5e771d0252e871ce194d0fdcafd13971f1aae0ddacc5f25615030d5df55c3a2"}, - {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c3c1b90ab93fed424e454e93c0ed0b9d552bdf1b0929712b094f5ecfe7a23ad"}, - {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20405cb8b13fd779135df23fabadc53b86522d0f1cba8cca0e87968587f50650"}, - {file = "grpcio-1.64.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0cc79c982ccb2feec8aad0e8fb0d168bcbca85bc77b080d0d3c5f2f15c24ea8f"}, - {file = "grpcio-1.64.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a3a035c37ce7565b8f4f35ff683a4db34d24e53dc487e47438e434eb3f701b2a"}, - {file = "grpcio-1.64.1-cp38-cp38-win32.whl", hash = "sha256:1257b76748612aca0f89beec7fa0615727fd6f2a1ad580a9638816a4b2eb18fd"}, - {file = "grpcio-1.64.1-cp38-cp38-win_amd64.whl", hash = "sha256:0a12ddb1678ebc6a84ec6b0487feac020ee2b1659cbe69b80f06dbffdb249122"}, - {file = "grpcio-1.64.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:75dbbf415026d2862192fe1b28d71f209e2fd87079d98470db90bebe57b33179"}, - {file = "grpcio-1.64.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e3d9f8d1221baa0ced7ec7322a981e28deb23749c76eeeb3d33e18b72935ab62"}, - {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:5f8b75f64d5d324c565b263c67dbe4f0af595635bbdd93bb1a88189fc62ed2e5"}, - {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c84ad903d0d94311a2b7eea608da163dace97c5fe9412ea311e72c3684925602"}, - {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:940e3ec884520155f68a3b712d045e077d61c520a195d1a5932c531f11883489"}, - {file = "grpcio-1.64.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f10193c69fc9d3d726e83bbf0f3d316f1847c3071c8c93d8090cf5f326b14309"}, - {file = "grpcio-1.64.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:ac15b6c2c80a4d1338b04d42a02d376a53395ddf0ec9ab157cbaf44191f3ffdd"}, - {file = "grpcio-1.64.1-cp39-cp39-win32.whl", hash = "sha256:03b43d0ccf99c557ec671c7dede64f023c7da9bb632ac65dbc57f166e4970040"}, - {file = "grpcio-1.64.1-cp39-cp39-win_amd64.whl", hash = "sha256:ed6091fa0adcc7e4ff944090cf203a52da35c37a130efa564ded02b7aff63bcd"}, - {file = "grpcio-1.64.1.tar.gz", hash = "sha256:8d51dd1c59d5fa0f34266b80a3805ec29a1f26425c2a54736133f6d87fc4968a"}, + {file = "grpcio-1.62.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:66344ea741124c38588a664237ac2fa16dfd226964cca23ddc96bd4accccbde5"}, + {file = "grpcio-1.62.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:5dab7ac2c1e7cb6179c6bfad6b63174851102cbe0682294e6b1d6f0981ad7138"}, + {file = "grpcio-1.62.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:3ad00f3f0718894749d5a8bb0fa125a7980a2f49523731a9b1fabf2b3522aa43"}, + {file = "grpcio-1.62.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e72ddfee62430ea80133d2cbe788e0d06b12f865765cb24a40009668bd8ea05"}, + {file = "grpcio-1.62.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53d3a59a10af4c2558a8e563aed9f256259d2992ae0d3037817b2155f0341de1"}, + {file = "grpcio-1.62.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1511a303f8074f67af4119275b4f954189e8313541da7b88b1b3a71425cdb10"}, + {file = "grpcio-1.62.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b94d41b7412ef149743fbc3178e59d95228a7064c5ab4760ae82b562bdffb199"}, + {file = "grpcio-1.62.2-cp310-cp310-win32.whl", hash = "sha256:a75af2fc7cb1fe25785be7bed1ab18cef959a376cdae7c6870184307614caa3f"}, + {file = "grpcio-1.62.2-cp310-cp310-win_amd64.whl", hash = "sha256:80407bc007754f108dc2061e37480238b0dc1952c855e86a4fc283501ee6bb5d"}, + {file = "grpcio-1.62.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:c1624aa686d4b36790ed1c2e2306cc3498778dffaf7b8dd47066cf819028c3ad"}, + {file = "grpcio-1.62.2-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:1c1bb80299bdef33309dff03932264636450c8fdb142ea39f47e06a7153d3063"}, + {file = "grpcio-1.62.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:db068bbc9b1fa16479a82e1ecf172a93874540cb84be69f0b9cb9b7ac3c82670"}, + {file = "grpcio-1.62.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2cc8a308780edbe2c4913d6a49dbdb5befacdf72d489a368566be44cadaef1a"}, + {file = "grpcio-1.62.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0695ae31a89f1a8fc8256050329a91a9995b549a88619263a594ca31b76d756"}, + {file = "grpcio-1.62.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:88b4f9ee77191dcdd8810241e89340a12cbe050be3e0d5f2f091c15571cd3930"}, + {file = "grpcio-1.62.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a0204532aa2f1afd467024b02b4069246320405bc18abec7babab03e2644e75"}, + {file = "grpcio-1.62.2-cp311-cp311-win32.whl", hash = "sha256:6e784f60e575a0de554ef9251cbc2ceb8790914fe324f11e28450047f264ee6f"}, + {file = "grpcio-1.62.2-cp311-cp311-win_amd64.whl", hash = "sha256:112eaa7865dd9e6d7c0556c8b04ae3c3a2dc35d62ad3373ab7f6a562d8199200"}, + {file = "grpcio-1.62.2-cp312-cp312-linux_armv7l.whl", hash = "sha256:65034473fc09628a02fb85f26e73885cf1ed39ebd9cf270247b38689ff5942c5"}, + {file = "grpcio-1.62.2-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d2c1771d0ee3cf72d69bb5e82c6a82f27fbd504c8c782575eddb7839729fbaad"}, + {file = "grpcio-1.62.2-cp312-cp312-manylinux_2_17_aarch64.whl", hash = 
"sha256:3abe6838196da518863b5d549938ce3159d809218936851b395b09cad9b5d64a"}, + {file = "grpcio-1.62.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5ffeb269f10cedb4f33142b89a061acda9f672fd1357331dbfd043422c94e9e"}, + {file = "grpcio-1.62.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:404d3b4b6b142b99ba1cff0b2177d26b623101ea2ce51c25ef6e53d9d0d87bcc"}, + {file = "grpcio-1.62.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:262cda97efdabb20853d3b5a4c546a535347c14b64c017f628ca0cc7fa780cc6"}, + {file = "grpcio-1.62.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17708db5b11b966373e21519c4c73e5a750555f02fde82276ea2a267077c68ad"}, + {file = "grpcio-1.62.2-cp312-cp312-win32.whl", hash = "sha256:b7ec9e2f8ffc8436f6b642a10019fc513722858f295f7efc28de135d336ac189"}, + {file = "grpcio-1.62.2-cp312-cp312-win_amd64.whl", hash = "sha256:aa787b83a3cd5e482e5c79be030e2b4a122ecc6c5c6c4c42a023a2b581fdf17b"}, + {file = "grpcio-1.62.2-cp37-cp37m-linux_armv7l.whl", hash = "sha256:cfd23ad29bfa13fd4188433b0e250f84ec2c8ba66b14a9877e8bce05b524cf54"}, + {file = "grpcio-1.62.2-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:af15e9efa4d776dfcecd1d083f3ccfb04f876d613e90ef8432432efbeeac689d"}, + {file = "grpcio-1.62.2-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:f4aa94361bb5141a45ca9187464ae81a92a2a135ce2800b2203134f7a1a1d479"}, + {file = "grpcio-1.62.2-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82af3613a219512a28ee5c95578eb38d44dd03bca02fd918aa05603c41018051"}, + {file = "grpcio-1.62.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55ddaf53474e8caeb29eb03e3202f9d827ad3110475a21245f3c7712022882a9"}, + {file = "grpcio-1.62.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c79b518c56dddeec79e5500a53d8a4db90da995dfe1738c3ac57fe46348be049"}, + {file = "grpcio-1.62.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a5eb4844e5e60bf2c446ef38c5b40d7752c6effdee882f716eb57ae87255d20a"}, + {file = "grpcio-1.62.2-cp37-cp37m-win_amd64.whl", hash = "sha256:aaae70364a2d1fb238afd6cc9fcb10442b66e397fd559d3f0968d28cc3ac929c"}, + {file = "grpcio-1.62.2-cp38-cp38-linux_armv7l.whl", hash = "sha256:1bcfe5070e4406f489e39325b76caeadab28c32bf9252d3ae960c79935a4cc36"}, + {file = "grpcio-1.62.2-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:da6a7b6b938c15fa0f0568e482efaae9c3af31963eec2da4ff13a6d8ec2888e4"}, + {file = "grpcio-1.62.2-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:41955b641c34db7d84db8d306937b72bc4968eef1c401bea73081a8d6c3d8033"}, + {file = "grpcio-1.62.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c772f225483905f675cb36a025969eef9712f4698364ecd3a63093760deea1bc"}, + {file = "grpcio-1.62.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07ce1f775d37ca18c7a141300e5b71539690efa1f51fe17f812ca85b5e73262f"}, + {file = "grpcio-1.62.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:26f415f40f4a93579fd648f48dca1c13dfacdfd0290f4a30f9b9aeb745026811"}, + {file = "grpcio-1.62.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:db707e3685ff16fc1eccad68527d072ac8bdd2e390f6daa97bc394ea7de4acea"}, + {file = "grpcio-1.62.2-cp38-cp38-win32.whl", hash = "sha256:589ea8e75de5fd6df387de53af6c9189c5231e212b9aa306b6b0d4f07520fbb9"}, + {file = "grpcio-1.62.2-cp38-cp38-win_amd64.whl", hash = "sha256:3c3ed41f4d7a3aabf0f01ecc70d6b5d00ce1800d4af652a549de3f7cf35c4abd"}, + {file = "grpcio-1.62.2-cp39-cp39-linux_armv7l.whl", hash = 
"sha256:162ccf61499c893831b8437120600290a99c0bc1ce7b51f2c8d21ec87ff6af8b"}, + {file = "grpcio-1.62.2-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:f27246d7da7d7e3bd8612f63785a7b0c39a244cf14b8dd9dd2f2fab939f2d7f1"}, + {file = "grpcio-1.62.2-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:2507006c8a478f19e99b6fe36a2464696b89d40d88f34e4b709abe57e1337467"}, + {file = "grpcio-1.62.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a90ac47a8ce934e2c8d71e317d2f9e7e6aaceb2d199de940ce2c2eb611b8c0f4"}, + {file = "grpcio-1.62.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99701979bcaaa7de8d5f60476487c5df8f27483624f1f7e300ff4669ee44d1f2"}, + {file = "grpcio-1.62.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:af7dc3f7a44f10863b1b0ecab4078f0a00f561aae1edbd01fd03ad4dcf61c9e9"}, + {file = "grpcio-1.62.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fa63245271920786f4cb44dcada4983a3516be8f470924528cf658731864c14b"}, + {file = "grpcio-1.62.2-cp39-cp39-win32.whl", hash = "sha256:c6ad9c39704256ed91a1cffc1379d63f7d0278d6a0bad06b0330f5d30291e3a3"}, + {file = "grpcio-1.62.2-cp39-cp39-win_amd64.whl", hash = "sha256:16da954692fd61aa4941fbeda405a756cd96b97b5d95ca58a92547bba2c1624f"}, + {file = "grpcio-1.62.2.tar.gz", hash = "sha256:c77618071d96b7a8be2c10701a98537823b9c65ba256c0b9067e0594cdbd954d"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.64.1)"] +protobuf = ["grpcio-tools (>=1.62.2)"] [[package]] name = "h11" @@ -2059,105 +2066,100 @@ files = [ [[package]] name = "ijson" -version = "3.3.0" +version = "3.2.3" description = "Iterative JSON parser with standard Python iterator interfaces" optional = true python-versions = "*" files = [ - {file = "ijson-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7f7a5250599c366369fbf3bc4e176f5daa28eb6bc7d6130d02462ed335361675"}, - {file = "ijson-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f87a7e52f79059f9c58f6886c262061065eb6f7554a587be7ed3aa63e6b71b34"}, - {file = "ijson-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b73b493af9e947caed75d329676b1b801d673b17481962823a3e55fe529c8b8b"}, - {file = "ijson-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5576415f3d76290b160aa093ff968f8bf6de7d681e16e463a0134106b506f49"}, - {file = "ijson-3.3.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e9ffe358d5fdd6b878a8a364e96e15ca7ca57b92a48f588378cef315a8b019e"}, - {file = "ijson-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8643c255a25824ddd0895c59f2319c019e13e949dc37162f876c41a283361527"}, - {file = "ijson-3.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:df3ab5e078cab19f7eaeef1d5f063103e1ebf8c26d059767b26a6a0ad8b250a3"}, - {file = "ijson-3.3.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3dc1fb02c6ed0bae1b4bf96971258bf88aea72051b6e4cebae97cff7090c0607"}, - {file = "ijson-3.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e9afd97339fc5a20f0542c971f90f3ca97e73d3050cdc488d540b63fae45329a"}, - {file = "ijson-3.3.0-cp310-cp310-win32.whl", hash = "sha256:844c0d1c04c40fd1b60f148dc829d3f69b2de789d0ba239c35136efe9a386529"}, - {file = "ijson-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:d654d045adafdcc6c100e8e911508a2eedbd2a1b5f93f930ba13ea67d7704ee9"}, - {file = "ijson-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:501dce8eaa537e728aa35810656aa00460a2547dcb60937c8139f36ec344d7fc"}, - {file = 
"ijson-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:658ba9cad0374d37b38c9893f4864f284cdcc7d32041f9808fba8c7bcaadf134"}, - {file = "ijson-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2636cb8c0f1023ef16173f4b9a233bcdb1df11c400c603d5f299fac143ca8d70"}, - {file = "ijson-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd174b90db68c3bcca273e9391934a25d76929d727dc75224bf244446b28b03b"}, - {file = "ijson-3.3.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97a9aea46e2a8371c4cf5386d881de833ed782901ac9f67ebcb63bb3b7d115af"}, - {file = "ijson-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c594c0abe69d9d6099f4ece17763d53072f65ba60b372d8ba6de8695ce6ee39e"}, - {file = "ijson-3.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8e0ff16c224d9bfe4e9e6bd0395826096cda4a3ef51e6c301e1b61007ee2bd24"}, - {file = "ijson-3.3.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0015354011303175eae7e2ef5136414e91de2298e5a2e9580ed100b728c07e51"}, - {file = "ijson-3.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:034642558afa57351a0ffe6de89e63907c4cf6849070cc10a3b2542dccda1afe"}, - {file = "ijson-3.3.0-cp311-cp311-win32.whl", hash = "sha256:192e4b65495978b0bce0c78e859d14772e841724d3269fc1667dc6d2f53cc0ea"}, - {file = "ijson-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:72e3488453754bdb45c878e31ce557ea87e1eb0f8b4fc610373da35e8074ce42"}, - {file = "ijson-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:988e959f2f3d59ebd9c2962ae71b97c0df58323910d0b368cc190ad07429d1bb"}, - {file = "ijson-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b2f73f0d0fce5300f23a1383d19b44d103bb113b57a69c36fd95b7c03099b181"}, - {file = "ijson-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0ee57a28c6bf523d7cb0513096e4eb4dac16cd935695049de7608ec110c2b751"}, - {file = "ijson-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0155a8f079c688c2ccaea05de1ad69877995c547ba3d3612c1c336edc12a3a5"}, - {file = "ijson-3.3.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ab00721304af1ae1afa4313ecfa1bf16b07f55ef91e4a5b93aeaa3e2bd7917c"}, - {file = "ijson-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40ee3821ee90be0f0e95dcf9862d786a7439bd1113e370736bfdf197e9765bfb"}, - {file = "ijson-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:da3b6987a0bc3e6d0f721b42c7a0198ef897ae50579547b0345f7f02486898f5"}, - {file = "ijson-3.3.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:63afea5f2d50d931feb20dcc50954e23cef4127606cc0ecf7a27128ed9f9a9e6"}, - {file = "ijson-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b5c3e285e0735fd8c5a26d177eca8b52512cdd8687ca86ec77a0c66e9c510182"}, - {file = "ijson-3.3.0-cp312-cp312-win32.whl", hash = "sha256:907f3a8674e489abdcb0206723e5560a5cb1fa42470dcc637942d7b10f28b695"}, - {file = "ijson-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8f890d04ad33262d0c77ead53c85f13abfb82f2c8f078dfbf24b78f59534dfdd"}, - {file = "ijson-3.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b9d85a02e77ee8ea6d9e3fd5d515bcc3d798d9c1ea54817e5feb97a9bc5d52fe"}, - {file = "ijson-3.3.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6576cdc36d5a09b0c1a3d81e13a45d41a6763188f9eaae2da2839e8a4240bce"}, - {file = "ijson-3.3.0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:e5589225c2da4bb732c9c370c5961c39a6db72cf69fb2a28868a5413ed7f39e6"}, - {file = "ijson-3.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad04cf38164d983e85f9cba2804566c0160b47086dcca4cf059f7e26c5ace8ca"}, - {file = "ijson-3.3.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:a3b730ef664b2ef0e99dec01b6573b9b085c766400af363833e08ebc1e38eb2f"}, - {file = "ijson-3.3.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:4690e3af7b134298055993fcbea161598d23b6d3ede11b12dca6815d82d101d5"}, - {file = "ijson-3.3.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:aaa6bfc2180c31a45fac35d40e3312a3d09954638ce0b2e9424a88e24d262a13"}, - {file = "ijson-3.3.0-cp36-cp36m-win32.whl", hash = "sha256:44367090a5a876809eb24943f31e470ba372aaa0d7396b92b953dda953a95d14"}, - {file = "ijson-3.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:7e2b3e9ca957153557d06c50a26abaf0d0d6c0ddf462271854c968277a6b5372"}, - {file = "ijson-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:47c144117e5c0e2babb559bc8f3f76153863b8dd90b2d550c51dab5f4b84a87f"}, - {file = "ijson-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ce02af5fbf9ba6abb70765e66930aedf73311c7d840478f1ccecac53fefbf3"}, - {file = "ijson-3.3.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ac6c3eeed25e3e2cb9b379b48196413e40ac4e2239d910bb33e4e7f6c137745"}, - {file = "ijson-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d92e339c69b585e7b1d857308ad3ca1636b899e4557897ccd91bb9e4a56c965b"}, - {file = "ijson-3.3.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:8c85447569041939111b8c7dbf6f8fa7a0eb5b2c4aebb3c3bec0fb50d7025121"}, - {file = "ijson-3.3.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:542c1e8fddf082159a5d759ee1412c73e944a9a2412077ed00b303ff796907dc"}, - {file = "ijson-3.3.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:30cfea40936afb33b57d24ceaf60d0a2e3d5c1f2335ba2623f21d560737cc730"}, - {file = "ijson-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:6b661a959226ad0d255e49b77dba1d13782f028589a42dc3172398dd3814c797"}, - {file = "ijson-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:0b003501ee0301dbf07d1597482009295e16d647bb177ce52076c2d5e64113e0"}, - {file = "ijson-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3e8d8de44effe2dbd0d8f3eb9840344b2d5b4cc284a14eb8678aec31d1b6bea8"}, - {file = "ijson-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9cd5c03c63ae06d4f876b9844c5898d0044c7940ff7460db9f4cd984ac7862b5"}, - {file = "ijson-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04366e7e4a4078d410845e58a2987fd9c45e63df70773d7b6e87ceef771b51ee"}, - {file = "ijson-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de7c1ddb80fa7a3ab045266dca169004b93f284756ad198306533b792774f10a"}, - {file = "ijson-3.3.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8851584fb931cffc0caa395f6980525fd5116eab8f73ece9d95e6f9c2c326c4c"}, - {file = "ijson-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdcfc88347fd981e53c33d832ce4d3e981a0d696b712fbcb45dcc1a43fe65c65"}, - {file = "ijson-3.3.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3917b2b3d0dbbe3296505da52b3cb0befbaf76119b2edaff30bd448af20b5400"}, - {file = "ijson-3.3.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:e10c14535abc7ddf3fd024aa36563cd8ab5d2bb6234a5d22c77c30e30fa4fb2b"}, - {file = "ijson-3.3.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = 
"sha256:3aba5c4f97f4e2ce854b5591a8b0711ca3b0c64d1b253b04ea7b004b0a197ef6"}, - {file = "ijson-3.3.0-cp38-cp38-win32.whl", hash = "sha256:b325f42e26659df1a0de66fdb5cde8dd48613da9c99c07d04e9fb9e254b7ee1c"}, - {file = "ijson-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:ff835906f84451e143f31c4ce8ad73d83ef4476b944c2a2da91aec8b649570e1"}, - {file = "ijson-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3c556f5553368dff690c11d0a1fb435d4ff1f84382d904ccc2dc53beb27ba62e"}, - {file = "ijson-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e4396b55a364a03ff7e71a34828c3ed0c506814dd1f50e16ebed3fc447d5188e"}, - {file = "ijson-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e6850ae33529d1e43791b30575070670070d5fe007c37f5d06aebc1dd152ab3f"}, - {file = "ijson-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36aa56d68ea8def26778eb21576ae13f27b4a47263a7a2581ab2ef58b8de4451"}, - {file = "ijson-3.3.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7ec759c4a0fc820ad5dc6a58e9c391e7b16edcb618056baedbedbb9ea3b1524"}, - {file = "ijson-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b51bab2c4e545dde93cb6d6bb34bf63300b7cd06716f195dd92d9255df728331"}, - {file = "ijson-3.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:92355f95a0e4da96d4c404aa3cff2ff033f9180a9515f813255e1526551298c1"}, - {file = "ijson-3.3.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8795e88adff5aa3c248c1edce932db003d37a623b5787669ccf205c422b91e4a"}, - {file = "ijson-3.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8f83f553f4cde6d3d4eaf58ec11c939c94a0ec545c5b287461cafb184f4b3a14"}, - {file = "ijson-3.3.0-cp39-cp39-win32.whl", hash = "sha256:ead50635fb56577c07eff3e557dac39533e0fe603000684eea2af3ed1ad8f941"}, - {file = "ijson-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:c8a9befb0c0369f0cf5c1b94178d0d78f66d9cebb9265b36be6e4f66236076b8"}, - {file = "ijson-3.3.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2af323a8aec8a50fa9effa6d640691a30a9f8c4925bd5364a1ca97f1ac6b9b5c"}, - {file = "ijson-3.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f64f01795119880023ba3ce43072283a393f0b90f52b66cc0ea1a89aa64a9ccb"}, - {file = "ijson-3.3.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a716e05547a39b788deaf22725490855337fc36613288aa8ae1601dc8c525553"}, - {file = "ijson-3.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:473f5d921fadc135d1ad698e2697025045cd8ed7e5e842258295012d8a3bc702"}, - {file = "ijson-3.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd26b396bc3a1e85f4acebeadbf627fa6117b97f4c10b177d5779577c6607744"}, - {file = "ijson-3.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:25fd49031cdf5fd5f1fd21cb45259a64dad30b67e64f745cc8926af1c8c243d3"}, - {file = "ijson-3.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b72178b1e565d06ab19319965022b36ef41bcea7ea153b32ec31194bec032a2"}, - {file = "ijson-3.3.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d0b6b637d05dbdb29d0bfac2ed8425bb369e7af5271b0cc7cf8b801cb7360c2"}, - {file = "ijson-3.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5378d0baa59ae422905c5f182ea0fd74fe7e52a23e3821067a7d58c8306b2191"}, - {file = "ijson-3.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:99f5c8ab048ee4233cc4f2b461b205cbe01194f6201018174ac269bf09995749"}, 
- {file = "ijson-3.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:45ff05de889f3dc3d37a59d02096948ce470699f2368b32113954818b21aa74a"}, - {file = "ijson-3.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1efb521090dd6cefa7aafd120581947b29af1713c902ff54336b7c7130f04c47"}, - {file = "ijson-3.3.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87c727691858fd3a1c085d9980d12395517fcbbf02c69fbb22dede8ee03422da"}, - {file = "ijson-3.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0420c24e50389bc251b43c8ed379ab3e3ba065ac8262d98beb6735ab14844460"}, - {file = "ijson-3.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8fdf3721a2aa7d96577970f5604bd81f426969c1822d467f07b3d844fa2fecc7"}, - {file = "ijson-3.3.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:891f95c036df1bc95309951940f8eea8537f102fa65715cdc5aae20b8523813b"}, - {file = "ijson-3.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed1336a2a6e5c427f419da0154e775834abcbc8ddd703004108121c6dd9eba9d"}, - {file = "ijson-3.3.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0c819f83e4f7b7f7463b2dc10d626a8be0c85fbc7b3db0edc098c2b16ac968e"}, - {file = "ijson-3.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33afc25057377a6a43c892de34d229a86f89ea6c4ca3dd3db0dcd17becae0dbb"}, - {file = "ijson-3.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7914d0cf083471856e9bc2001102a20f08e82311dfc8cf1a91aa422f9414a0d6"}, - {file = "ijson-3.3.0.tar.gz", hash = "sha256:7f172e6ba1bee0d4c8f8ebd639577bfe429dee0f3f96775a067b8bae4492d8a0"}, + {file = "ijson-3.2.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0a4ae076bf97b0430e4e16c9cb635a6b773904aec45ed8dcbc9b17211b8569ba"}, + {file = "ijson-3.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cfced0a6ec85916eb8c8e22415b7267ae118eaff2a860c42d2cc1261711d0d31"}, + {file = "ijson-3.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0b9d1141cfd1e6d6643aa0b4876730d0d28371815ce846d2e4e84a2d4f471cf3"}, + {file = "ijson-3.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e0a27db6454edd6013d40a956d008361aac5bff375a9c04ab11fc8c214250b5"}, + {file = "ijson-3.2.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c0d526ccb335c3c13063c273637d8611f32970603dfb182177b232d01f14c23"}, + {file = "ijson-3.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:545a30b3659df2a3481593d30d60491d1594bc8005f99600e1bba647bb44cbb5"}, + {file = "ijson-3.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9680e37a10fedb3eab24a4a7e749d8a73f26f1a4c901430e7aa81b5da15f7307"}, + {file = "ijson-3.2.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2a80c0bb1053055d1599e44dc1396f713e8b3407000e6390add72d49633ff3bb"}, + {file = "ijson-3.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f05ed49f434ce396ddcf99e9fd98245328e99f991283850c309f5e3182211a79"}, + {file = "ijson-3.2.3-cp310-cp310-win32.whl", hash = "sha256:b4eb2304573c9fdf448d3fa4a4fdcb727b93002b5c5c56c14a5ffbbc39f64ae4"}, + {file = "ijson-3.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:923131f5153c70936e8bd2dd9dcfcff43c67a3d1c789e9c96724747423c173eb"}, + {file = "ijson-3.2.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:904f77dd3d87736ff668884fe5197a184748eb0c3e302ded61706501d0327465"}, + {file = "ijson-3.2.3-cp311-cp311-macosx_10_9_x86_64.whl", 
hash = "sha256:0974444c1f416e19de1e9f567a4560890095e71e81623c509feff642114c1e53"}, + {file = "ijson-3.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1a4b8eb69b6d7b4e94170aa991efad75ba156b05f0de2a6cd84f991def12ff9"}, + {file = "ijson-3.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d052417fd7ce2221114f8d3b58f05a83c1a2b6b99cafe0b86ac9ed5e2fc889df"}, + {file = "ijson-3.2.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b8064a85ec1b0beda7dd028e887f7112670d574db606f68006c72dd0bb0e0e2"}, + {file = "ijson-3.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaac293853f1342a8d2a45ac1f723c860f700860e7743fb97f7b76356df883a8"}, + {file = "ijson-3.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6c32c18a934c1dc8917455b0ce478fd7a26c50c364bd52c5a4fb0fc6bb516af7"}, + {file = "ijson-3.2.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:713a919e0220ac44dab12b5fed74f9130f3480e55e90f9d80f58de129ea24f83"}, + {file = "ijson-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a3a6a2fbbe7550ffe52d151cf76065e6b89cfb3e9d0463e49a7e322a25d0426"}, + {file = "ijson-3.2.3-cp311-cp311-win32.whl", hash = "sha256:6a4db2f7fb9acfb855c9ae1aae602e4648dd1f88804a0d5cfb78c3639bcf156c"}, + {file = "ijson-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:ccd6be56335cbb845f3d3021b1766299c056c70c4c9165fb2fbe2d62258bae3f"}, + {file = "ijson-3.2.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:055b71bbc37af5c3c5861afe789e15211d2d3d06ac51ee5a647adf4def19c0ea"}, + {file = "ijson-3.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c075a547de32f265a5dd139ab2035900fef6653951628862e5cdce0d101af557"}, + {file = "ijson-3.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:457f8a5fc559478ac6b06b6d37ebacb4811f8c5156e997f0d87d708b0d8ab2ae"}, + {file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9788f0c915351f41f0e69ec2618b81ebfcf9f13d9d67c6d404c7f5afda3e4afb"}, + {file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa234ab7a6a33ed51494d9d2197fb96296f9217ecae57f5551a55589091e7853"}, + {file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd0dc5da4f9dc6d12ab6e8e0c57d8b41d3c8f9ceed31a99dae7b2baf9ea769a"}, + {file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c6beb80df19713e39e68dc5c337b5c76d36ccf69c30b79034634e5e4c14d6904"}, + {file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a2973ce57afb142d96f35a14e9cfec08308ef178a2c76b8b5e1e98f3960438bf"}, + {file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:105c314fd624e81ed20f925271ec506523b8dd236589ab6c0208b8707d652a0e"}, + {file = "ijson-3.2.3-cp312-cp312-win32.whl", hash = "sha256:ac44781de5e901ce8339352bb5594fcb3b94ced315a34dbe840b4cff3450e23b"}, + {file = "ijson-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:0567e8c833825b119e74e10a7c29761dc65fcd155f5d4cb10f9d3b8916ef9912"}, + {file = "ijson-3.2.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:eeb286639649fb6bed37997a5e30eefcacddac79476d24128348ec890b2a0ccb"}, + {file = "ijson-3.2.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:396338a655fb9af4ac59dd09c189885b51fa0eefc84d35408662031023c110d1"}, + {file = "ijson-3.2.3-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e0243d166d11a2a47c17c7e885debf3b19ed136be2af1f5d1c34212850236ac"}, + {file = 
"ijson-3.2.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85afdb3f3a5d0011584d4fa8e6dccc5936be51c27e84cd2882fe904ca3bd04c5"}, + {file = "ijson-3.2.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:4fc35d569eff3afa76bfecf533f818ecb9390105be257f3f83c03204661ace70"}, + {file = "ijson-3.2.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:455d7d3b7a6aacfb8ab1ebcaf697eedf5be66e044eac32508fccdc633d995f0e"}, + {file = "ijson-3.2.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:c63f3d57dbbac56cead05b12b81e8e1e259f14ce7f233a8cbe7fa0996733b628"}, + {file = "ijson-3.2.3-cp36-cp36m-win32.whl", hash = "sha256:a4d7fe3629de3ecb088bff6dfe25f77be3e8261ed53d5e244717e266f8544305"}, + {file = "ijson-3.2.3-cp36-cp36m-win_amd64.whl", hash = "sha256:96190d59f015b5a2af388a98446e411f58ecc6a93934e036daa75f75d02386a0"}, + {file = "ijson-3.2.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:35194e0b8a2bda12b4096e2e792efa5d4801a0abb950c48ade351d479cd22ba5"}, + {file = "ijson-3.2.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1053fb5f0b010ee76ca515e6af36b50d26c1728ad46be12f1f147a835341083"}, + {file = "ijson-3.2.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:211124cff9d9d139dd0dfced356f1472860352c055d2481459038b8205d7d742"}, + {file = "ijson-3.2.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92dc4d48e9f6a271292d6079e9fcdce33c83d1acf11e6e12696fb05c5889fe74"}, + {file = "ijson-3.2.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3dcc33ee56f92a77f48776014ddb47af67c33dda361e84371153c4f1ed4434e1"}, + {file = "ijson-3.2.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:98c6799925a5d1988da4cd68879b8eeab52c6e029acc45e03abb7921a4715c4b"}, + {file = "ijson-3.2.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4252e48c95cd8ceefc2caade310559ab61c37d82dfa045928ed05328eb5b5f65"}, + {file = "ijson-3.2.3-cp37-cp37m-win32.whl", hash = "sha256:644f4f03349ff2731fd515afd1c91b9e439e90c9f8c28292251834154edbffca"}, + {file = "ijson-3.2.3-cp37-cp37m-win_amd64.whl", hash = "sha256:ba33c764afa9ecef62801ba7ac0319268a7526f50f7601370d9f8f04e77fc02b"}, + {file = "ijson-3.2.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4b2ec8c2a3f1742cbd5f36b65e192028e541b5fd8c7fd97c1fc0ca6c427c704a"}, + {file = "ijson-3.2.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7dc357da4b4ebd8903e77dbcc3ce0555ee29ebe0747c3c7f56adda423df8ec89"}, + {file = "ijson-3.2.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bcc51c84bb220ac330122468fe526a7777faa6464e3b04c15b476761beea424f"}, + {file = "ijson-3.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8d54b624629f9903005c58d9321a036c72f5c212701bbb93d1a520ecd15e370"}, + {file = "ijson-3.2.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6ea7c7e3ec44742e867c72fd750c6a1e35b112f88a917615332c4476e718d40"}, + {file = "ijson-3.2.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:916acdc5e504f8b66c3e287ada5d4b39a3275fc1f2013c4b05d1ab9933671a6c"}, + {file = "ijson-3.2.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81815b4184b85ce124bfc4c446d5f5e5e643fc119771c5916f035220ada29974"}, + {file = "ijson-3.2.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b49fd5fe1cd9c1c8caf6c59f82b08117dd6bea2ec45b641594e25948f48f4169"}, + {file = "ijson-3.2.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:86b3c91fdcb8ffb30556c9669930f02b7642de58ca2987845b04f0d7fe46d9a8"}, + {file = 
"ijson-3.2.3-cp38-cp38-win32.whl", hash = "sha256:a729b0c8fb935481afe3cf7e0dadd0da3a69cc7f145dbab8502e2f1e01d85a7c"}, + {file = "ijson-3.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:d34e049992d8a46922f96483e96b32ac4c9cffd01a5c33a928e70a283710cd58"}, + {file = "ijson-3.2.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9c2a12dcdb6fa28f333bf10b3a0f80ec70bc45280d8435be7e19696fab2bc706"}, + {file = "ijson-3.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1844c5b57da21466f255a0aeddf89049e730d7f3dfc4d750f0e65c36e6a61a7c"}, + {file = "ijson-3.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2ec3e5ff2515f1c40ef6a94983158e172f004cd643b9e4b5302017139b6c96e4"}, + {file = "ijson-3.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46bafb1b9959872a1f946f8dd9c6f1a30a970fc05b7bfae8579da3f1f988e598"}, + {file = "ijson-3.2.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab4db9fee0138b60e31b3c02fff8a4c28d7b152040553b6a91b60354aebd4b02"}, + {file = "ijson-3.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4bc87e69d1997c6a55fff5ee2af878720801ff6ab1fb3b7f94adda050651e37"}, + {file = "ijson-3.2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e9fd906f0c38e9f0bfd5365e1bed98d649f506721f76bb1a9baa5d7374f26f19"}, + {file = "ijson-3.2.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e84d27d1acb60d9102728d06b9650e5b7e5cb0631bd6e3dfadba8fb6a80d6c2f"}, + {file = "ijson-3.2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2cc04fc0a22bb945cd179f614845c8b5106c0b3939ee0d84ce67c7a61ac1a936"}, + {file = "ijson-3.2.3-cp39-cp39-win32.whl", hash = "sha256:e641814793a037175f7ec1b717ebb68f26d89d82cfd66f36e588f32d7e488d5f"}, + {file = "ijson-3.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:6bd3e7e91d031f1e8cea7ce53f704ab74e61e505e8072467e092172422728b22"}, + {file = "ijson-3.2.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:06f9707da06a19b01013f8c65bf67db523662a9b4a4ff027e946e66c261f17f0"}, + {file = "ijson-3.2.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be8495f7c13fa1f622a2c6b64e79ac63965b89caf664cc4e701c335c652d15f2"}, + {file = "ijson-3.2.3-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7596b42f38c3dcf9d434dddd50f46aeb28e96f891444c2b4b1266304a19a2c09"}, + {file = "ijson-3.2.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbac4e9609a1086bbad075beb2ceec486a3b138604e12d2059a33ce2cba93051"}, + {file = "ijson-3.2.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:db2d6341f9cb538253e7fe23311d59252f124f47165221d3c06a7ed667ecd595"}, + {file = "ijson-3.2.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fa8b98be298efbb2588f883f9953113d8a0023ab39abe77fe734b71b46b1220a"}, + {file = "ijson-3.2.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:674e585361c702fad050ab4c153fd168dc30f5980ef42b64400bc84d194e662d"}, + {file = "ijson-3.2.3-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd12e42b9cb9c0166559a3ffa276b4f9fc9d5b4c304e5a13668642d34b48b634"}, + {file = "ijson-3.2.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d31e0d771d82def80cd4663a66de277c3b44ba82cd48f630526b52f74663c639"}, + {file = "ijson-3.2.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ce4c70c23521179d6da842bb9bc2e36bb9fad1e0187e35423ff0f282890c9ca"}, + {file = "ijson-3.2.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:39f551a6fbeed4433c85269c7c8778e2aaea2501d7ebcb65b38f556030642c17"}, + {file = "ijson-3.2.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b14d322fec0de7af16f3ef920bf282f0dd747200b69e0b9628117f381b7775b"}, + {file = "ijson-3.2.3-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7851a341429b12d4527ca507097c959659baf5106c7074d15c17c387719ffbcd"}, + {file = "ijson-3.2.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db3bf1b42191b5cc9b6441552fdcb3b583594cb6b19e90d1578b7cbcf80d0fae"}, + {file = "ijson-3.2.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:6f662dc44362a53af3084d3765bb01cd7b4734d1f484a6095cad4cb0cbfe5374"}, + {file = "ijson-3.2.3.tar.gz", hash = "sha256:10294e9bf89cb713da05bc4790bdff616610432db561964827074898e174f917"}, ] [[package]] @@ -2376,13 +2378,13 @@ testing = ["Django (<3.1)", "colorama", "docopt", "pytest (>=3.9.0,<5.0.0)"] [[package]] name = "jinja2" -version = "3.1.4" +version = "3.1.3" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] @@ -3153,13 +3155,13 @@ testing = ["beautifulsoup4", "coverage", "docutils (>=0.17.0,<0.18.0)", "pytest [[package]] name = "nautobot" -version = "2.2.5" +version = "2.2.2" description = "Source of truth and network automation platform." 
optional = false python-versions = "<3.12,>=3.8" files = [ - {file = "nautobot-2.2.5-py3-none-any.whl", hash = "sha256:8b4256cb5f76b13d56c754b8a04e2869bc78d6a6593b2e7aae8094073320cb49"}, - {file = "nautobot-2.2.5.tar.gz", hash = "sha256:0b0ac6aae922092dad271feccfef3efe1e1482284b23d0acbdb0c61f78227b57"}, + {file = "nautobot-2.2.2-py3-none-any.whl", hash = "sha256:4a77b13d60f004815007d519f29602bba5b9ff899d235bb055a64ce9b6f461ce"}, + {file = "nautobot-2.2.2.tar.gz", hash = "sha256:502fd0bf8691900b1c86c307e8bb3765990890a02e656c4af8e0b9cc3b7cc6f4"}, ] [package.dependencies] @@ -3190,7 +3192,7 @@ emoji = ">=2.11.0,<2.12.0" GitPython = ">=3.1.43,<3.2.0" graphene-django = ">=2.16.0,<2.17.0" graphene-django-optimizer = ">=0.8.0,<0.9.0" -Jinja2 = ">=3.1.4,<3.2.0" +Jinja2 = ">=3.1.3,<3.2.0" jsonschema = ">=4.7.0,<5.0.0" Markdown = ">=3.5.2,<3.6.0" MarkupSafe = ">=2.1.5,<2.2.0" @@ -3204,16 +3206,16 @@ psycopg2-binary = ">=2.9.9,<2.10.0" python-slugify = ">=8.0.3,<8.1.0" pyuwsgi = ">=2.0.23,<2.1.0" PyYAML = ">=6.0,<6.1" -social-auth-app-django = ">=5.4.1,<5.5.0" +social-auth-app-django = ">=5.4.0,<5.5.0" svgwrite = ">=1.4.2,<1.5.0" [package.extras] -all = ["django-auth-ldap (>=4.7.0,<4.8.0)", "django-storages (>=1.14.2,<1.15.0)", "mysqlclient (>=2.2.3,<2.3.0)", "napalm (>=4.1.0,<4.2.0)", "social-auth-core[saml] (>=4.5.3,<4.6.0)"] +all = ["django-auth-ldap (>=4.7.0,<4.8.0)", "django-storages (>=1.14.2,<1.15.0)", "mysqlclient (>=2.2.3,<2.3.0)", "napalm (>=4.1.0,<4.2.0)", "social-auth-core[openidconnect,saml] (>=4.5.3,<4.6.0)"] ldap = ["django-auth-ldap (>=4.7.0,<4.8.0)"] mysql = ["mysqlclient (>=2.2.3,<2.3.0)"] napalm = ["napalm (>=4.1.0,<4.2.0)"] remote-storage = ["django-storages (>=1.14.2,<1.15.0)"] -sso = ["social-auth-core[saml] (>=4.5.3,<4.6.0)"] +sso = ["social-auth-core[openidconnect,saml] (>=4.5.3,<4.6.0)"] [[package]] name = "nautobot-capacity-metrics" @@ -3584,19 +3586,18 @@ files = [ [[package]] name = "platformdirs" -version = "4.2.2" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] [[package]] name = "prometheus-client" @@ -3630,13 +3631,13 @@ test = ["coveralls", "futures", "mock", "pytest (>=2.7.3)", "pytest-benchmark", [[package]] name = "prompt-toolkit" -version = "3.0.47" +version = "3.0.43" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, - {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, + {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, + {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, ] [package.dependencies] @@ -3644,22 +3645,33 @@ wcwidth = "*" [[package]] name = "protobuf" -version = "4.25.3" -description = "" +version = "3.20.3" +description = "Protocol Buffers" optional = true -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, - {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, - {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, - {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, - {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, - {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, - {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, - {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, - {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, - {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, - {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, + {file = "protobuf-3.20.3-cp310-cp310-manylinux2014_aarch64.whl", hash = 
"sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99"}, + {file = "protobuf-3.20.3-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e"}, + {file = "protobuf-3.20.3-cp310-cp310-win32.whl", hash = "sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c"}, + {file = "protobuf-3.20.3-cp310-cp310-win_amd64.whl", hash = "sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7"}, + {file = "protobuf-3.20.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469"}, + {file = "protobuf-3.20.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4"}, + {file = "protobuf-3.20.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4"}, + {file = "protobuf-3.20.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454"}, + {file = "protobuf-3.20.3-cp37-cp37m-win32.whl", hash = "sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905"}, + {file = "protobuf-3.20.3-cp37-cp37m-win_amd64.whl", hash = "sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c"}, + {file = "protobuf-3.20.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7"}, + {file = "protobuf-3.20.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee"}, + {file = "protobuf-3.20.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050"}, + {file = "protobuf-3.20.3-cp38-cp38-win32.whl", hash = "sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86"}, + {file = "protobuf-3.20.3-cp38-cp38-win_amd64.whl", hash = "sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9"}, + {file = "protobuf-3.20.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b"}, + {file = "protobuf-3.20.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b"}, + {file = "protobuf-3.20.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402"}, + {file = "protobuf-3.20.3-cp39-cp39-win32.whl", hash = "sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480"}, + {file = "protobuf-3.20.3-cp39-cp39-win_amd64.whl", hash = "sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7"}, + {file = "protobuf-3.20.3-py2.py3-none-any.whl", hash = "sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db"}, + {file = "protobuf-3.20.3.tar.gz", hash = "sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2"}, ] [[package]] @@ -3882,47 +3894,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.16" +version = "1.10.15" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.16-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1a539ac40551b01a85e899829aa43ca8036707474af8d74b48be288d4d2d2846"}, - {file = 
"pydantic-1.10.16-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a4fcc7b0b8038dbda2dda642cff024032dfae24a7960cc58e57a39eb1949b9b"}, - {file = "pydantic-1.10.16-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4660dd697de1ae2d4305a85161312611f64d5360663a9ba026cd6ad9e3fe14c3"}, - {file = "pydantic-1.10.16-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:900a787c574f903a97d0bf52a43ff3b6cf4fa0119674bcfc0e5fd1056d388ad9"}, - {file = "pydantic-1.10.16-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d30192a63e6d3334c3f0c0506dd6ae9f1dce7b2f8845518915291393a5707a22"}, - {file = "pydantic-1.10.16-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:16cf23ed599ca5ca937e37ba50ab114e6b5c387eb43a6cc533701605ad1be611"}, - {file = "pydantic-1.10.16-cp310-cp310-win_amd64.whl", hash = "sha256:8d23111f41d1e19334edd51438fd57933f3eee7d9d2fa8cc3f5eda515a272055"}, - {file = "pydantic-1.10.16-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef287b8d7fc0e86a8bd1f902c61aff6ba9479c50563242fe88ba39692e98e1e0"}, - {file = "pydantic-1.10.16-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b9ded699bfd3b3912d796ff388b0c607e6d35d41053d37aaf8fd6082c660de9a"}, - {file = "pydantic-1.10.16-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:daeb199814333e4426c5e86d7fb610f4e230289f28cab90eb4de27330bef93cf"}, - {file = "pydantic-1.10.16-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5973843f1fa99ec6c3ac8d1a8698ac9340b35e45cca6c3e5beb5c3bd1ef15de6"}, - {file = "pydantic-1.10.16-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6b8a7788a8528a558828fe4a48783cafdcf2612d13c491594a8161dc721629c"}, - {file = "pydantic-1.10.16-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8abaecf54dacc9d991dda93c3b880d41092a8924cde94eeb811d7d9ab55df7d8"}, - {file = "pydantic-1.10.16-cp311-cp311-win_amd64.whl", hash = "sha256:ddc7b682fbd23f051edc419dc6977e11dd2dbdd0cef9d05f0e15d1387862d230"}, - {file = "pydantic-1.10.16-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:067c2b5539f7839653ad8c3d1fc2f1343338da8677b7b2172abf3cd3fdc8f719"}, - {file = "pydantic-1.10.16-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d1fc943583c046ecad0ff5d6281ee571b64e11b5503d9595febdce54f38b290"}, - {file = "pydantic-1.10.16-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18548b30ccebe71d380b0886cc44ea5d80afbcc155e3518792f13677ad06097d"}, - {file = "pydantic-1.10.16-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4e92292f9580fc5ea517618580fac24e9f6dc5657196e977c194a8e50e14f5a9"}, - {file = "pydantic-1.10.16-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5da8bc4bb4f85b8c97cc7f11141fddbbd29eb25e843672e5807e19cc3d7c1b7f"}, - {file = "pydantic-1.10.16-cp37-cp37m-win_amd64.whl", hash = "sha256:a04ee1ea34172b87707a6ecfcdb120d7656892206b7c4dbdb771a73e90179fcb"}, - {file = "pydantic-1.10.16-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4fa86469fd46e732242c7acb83282d33f83591a7e06f840481327d5bf6d96112"}, - {file = "pydantic-1.10.16-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:89c2783dc261726fe7a5ce1121bce29a2f7eb9b1e704c68df2b117604e3b346f"}, - {file = "pydantic-1.10.16-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78e59fa919fa7a192f423d190d8660c35dd444efa9216662273f36826765424b"}, - {file = 
"pydantic-1.10.16-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7e82a80068c77f4b074032e031e642530b6d45cb8121fc7c99faa31fb6c6b72"}, - {file = "pydantic-1.10.16-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d82d5956cee27a30e26a5b88d00a6a2a15a4855e13c9baf50175976de0dc282c"}, - {file = "pydantic-1.10.16-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b7b99424cc0970ff08deccb549b5a6ec1040c0b449eab91723e64df2bd8fdca"}, - {file = "pydantic-1.10.16-cp38-cp38-win_amd64.whl", hash = "sha256:d97a35e1ba59442775201657171f601a2879e63517a55862a51f8d67cdfc0017"}, - {file = "pydantic-1.10.16-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9d91f6866fd3e303c632207813ef6bc4d86055e21c5e5a0a311983a9ac5f0192"}, - {file = "pydantic-1.10.16-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d8d3c71d14c8bd26d2350c081908dbf59d5a6a8f9596d9ef2b09cc1e61c8662b"}, - {file = "pydantic-1.10.16-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b73e6386b439b4881d79244e9fc1e32d1e31e8d784673f5d58a000550c94a6c0"}, - {file = "pydantic-1.10.16-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5f039881fb2ef86f6de6eacce6e71701b47500355738367413ccc1550b2a69cf"}, - {file = "pydantic-1.10.16-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:3895ddb26f22bdddee7e49741486aa7b389258c6f6771943e87fc00eabd79134"}, - {file = "pydantic-1.10.16-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:55b945da2756b5cef93d792521ad0d457fdf2f69fd5a2d10a27513f5281717dd"}, - {file = "pydantic-1.10.16-cp39-cp39-win_amd64.whl", hash = "sha256:22dd265c77c3976a34be78409b128cb84629284dfd1b69d2fa1507a36f84dc8b"}, - {file = "pydantic-1.10.16-py3-none-any.whl", hash = "sha256:aa2774ba5412fd1c5cb890d08e8b0a3bb5765898913ba1f61a65a4810f03cf29"}, - {file = "pydantic-1.10.16.tar.gz", hash = "sha256:8bb388f6244809af69ee384900b10b677a69f1980fdc655ea419710cffcb5610"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = 
"pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash 
= "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, ] [package.dependencies] @@ -3945,16 +3957,17 @@ files = [ [[package]] name = "pygments" -version = "2.18.0" +version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, - {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, ] [package.extras] +plugins = ["importlib-metadata"] windows-terminal = ["colorama (>=0.4.6)"] [[package]] @@ -3976,17 +3989,17 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pylint" -version = "3.2.3" +version = "3.1.0" description = "python code static checker" optional = false python-versions = ">=3.8.0" files = [ - {file = "pylint-3.2.3-py3-none-any.whl", hash = "sha256:b3d7d2708a3e04b4679e02d99e72329a8b7ee8afb8d04110682278781f889fa8"}, - {file = "pylint-3.2.3.tar.gz", hash = "sha256:02f6c562b215582386068d52a30f520d84fdbcf2a95fc7e855b816060d048b60"}, + {file = "pylint-3.1.0-py3-none-any.whl", hash = "sha256:507a5b60953874766d8a366e8e8c7af63e058b26345cfcb5f91f89d987fd6b74"}, + {file = "pylint-3.1.0.tar.gz", hash = "sha256:6a69beb4a6f63debebaab0a3477ecd0f559aa726af4954fc948c51f7a2549e23"}, ] [package.dependencies] -astroid = ">=3.2.2,<=3.3.0-dev0" +astroid = ">=3.1.0,<=3.2.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, @@ -4139,12 +4152,13 @@ files = [ [[package]] name = "python-crontab" -version = "3.1.0" +version = "3.0.0" description = "Python Crontab API" optional = false python-versions = "*" files = [ - {file = "python-crontab-3.1.0.tar.gz", hash = "sha256:f4ea1605d24533b67fa7a634ef26cb59a5f2e7954f6e677d2d7a2229959a2fc8"}, + {file = "python-crontab-3.0.0.tar.gz", hash = "sha256:79fb7465039ddfd4fb93d072d6ee0d45c1ac8bf1597f0686ea14fd4361dba379"}, + {file = "python_crontab-3.0.0-py3-none-any.whl", hash = "sha256:6d5ba3c190ec76e4d252989a1644fcb233dbf53fbc8fceeb9febe1657b9fb1d4"}, ] [package.dependencies] @@ -4367,13 +4381,13 @@ pyyaml = "*" [[package]] name = "redis" -version = "5.0.6" +version = "5.0.3" description = "Python client for Redis database and key-value store" optional = false python-versions = ">=3.7" files = [ - {file = "redis-5.0.6-py3-none-any.whl", hash = "sha256:c0d6d990850c627bbf7be01c5c4cbaadf67b48593e913bb71c9819c30df37eee"}, - {file = "redis-5.0.6.tar.gz", hash = "sha256:38473cd7c6389ad3e44a91f4c3eaf6bcb8a9f746007f29bf4fb20824ff0b2197"}, + {file = "redis-5.0.3-py3-none-any.whl", hash = "sha256:5da9b8fe9e1254293756c16c008e8620b3d15fcc6dde6babde9541850e72a32d"}, + {file = "redis-5.0.3.tar.gz", hash = 
"sha256:4973bae7444c0fbed64a06b87446f79361cb7e4ec1538c022d696ed7a5015580"}, ] [package.dependencies] @@ -4385,101 +4399,115 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)" [[package]] name = "regex" -version = "2024.5.15" +version = "2024.4.16" description = "Alternative regular expression module, to replace re." optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a81e3cfbae20378d75185171587cbf756015ccb14840702944f014e0d93ea09f"}, - {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7b59138b219ffa8979013be7bc85bb60c6f7b7575df3d56dc1e403a438c7a3f6"}, - {file = "regex-2024.5.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0bd000c6e266927cb7a1bc39d55be95c4b4f65c5be53e659537537e019232b1"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eaa7ddaf517aa095fa8da0b5015c44d03da83f5bd49c87961e3c997daed0de7"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba68168daedb2c0bab7fd7e00ced5ba90aebf91024dea3c88ad5063c2a562cca"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e8d717bca3a6e2064fc3a08df5cbe366369f4b052dcd21b7416e6d71620dca1"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1337b7dbef9b2f71121cdbf1e97e40de33ff114801263b275aafd75303bd62b5"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9ebd0a36102fcad2f03696e8af4ae682793a5d30b46c647eaf280d6cfb32796"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9efa1a32ad3a3ea112224897cdaeb6aa00381627f567179c0314f7b65d354c62"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1595f2d10dff3d805e054ebdc41c124753631b6a471b976963c7b28543cf13b0"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b802512f3e1f480f41ab5f2cfc0e2f761f08a1f41092d6718868082fc0d27143"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a0981022dccabca811e8171f913de05720590c915b033b7e601f35ce4ea7019f"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:19068a6a79cf99a19ccefa44610491e9ca02c2be3305c7760d3831d38a467a6f"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1b5269484f6126eee5e687785e83c6b60aad7663dafe842b34691157e5083e53"}, - {file = "regex-2024.5.15-cp310-cp310-win32.whl", hash = "sha256:ada150c5adfa8fbcbf321c30c751dc67d2f12f15bd183ffe4ec7cde351d945b3"}, - {file = "regex-2024.5.15-cp310-cp310-win_amd64.whl", hash = "sha256:ac394ff680fc46b97487941f5e6ae49a9f30ea41c6c6804832063f14b2a5a145"}, - {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f5b1dff3ad008dccf18e652283f5e5339d70bf8ba7c98bf848ac33db10f7bc7a"}, - {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c6a2b494a76983df8e3d3feea9b9ffdd558b247e60b92f877f93a1ff43d26656"}, - {file = "regex-2024.5.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a32b96f15c8ab2e7d27655969a23895eb799de3665fa94349f3b2fbfd547236f"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:10002e86e6068d9e1c91eae8295ef690f02f913c57db120b58fdd35a6bb1af35"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec54d5afa89c19c6dd8541a133be51ee1017a38b412b1321ccb8d6ddbeb4cf7d"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10e4ce0dca9ae7a66e6089bb29355d4432caed736acae36fef0fdd7879f0b0cb"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e507ff1e74373c4d3038195fdd2af30d297b4f0950eeda6f515ae3d84a1770f"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1f059a4d795e646e1c37665b9d06062c62d0e8cc3c511fe01315973a6542e40"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0721931ad5fe0dda45d07f9820b90b2148ccdd8e45bb9e9b42a146cb4f695649"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:833616ddc75ad595dee848ad984d067f2f31be645d603e4d158bba656bbf516c"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:287eb7f54fc81546346207c533ad3c2c51a8d61075127d7f6d79aaf96cdee890"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:19dfb1c504781a136a80ecd1fff9f16dddf5bb43cec6871778c8a907a085bb3d"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:119af6e56dce35e8dfb5222573b50c89e5508d94d55713c75126b753f834de68"}, - {file = "regex-2024.5.15-cp311-cp311-win32.whl", hash = "sha256:1c1c174d6ec38d6c8a7504087358ce9213d4332f6293a94fbf5249992ba54efa"}, - {file = "regex-2024.5.15-cp311-cp311-win_amd64.whl", hash = "sha256:9e717956dcfd656f5055cc70996ee2cc82ac5149517fc8e1b60261b907740201"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:632b01153e5248c134007209b5c6348a544ce96c46005d8456de1d552455b014"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e64198f6b856d48192bf921421fdd8ad8eb35e179086e99e99f711957ffedd6e"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68811ab14087b2f6e0fc0c2bae9ad689ea3584cad6917fc57be6a48bbd012c49"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8ec0c2fea1e886a19c3bee0cd19d862b3aa75dcdfb42ebe8ed30708df64687a"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0c0c0003c10f54a591d220997dd27d953cd9ccc1a7294b40a4be5312be8797b"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2431b9e263af1953c55abbd3e2efca67ca80a3de8a0437cb58e2421f8184717a"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a605586358893b483976cffc1723fb0f83e526e8f14c6e6614e75919d9862cf"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391d7f7f1e409d192dba8bcd42d3e4cf9e598f3979cdaed6ab11288da88cb9f2"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9ff11639a8d98969c863d4617595eb5425fd12f7c5ef6621a4b74b71ed8726d5"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4eee78a04e6c67e8391edd4dad3279828dd66ac4b79570ec998e2155d2e59fd5"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:8fe45aa3f4aa57faabbc9cb46a93363edd6197cbc43523daea044e9ff2fea83e"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d0a3d8d6acf0c78a1fff0e210d224b821081330b8524e3e2bc5a68ef6ab5803d"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c486b4106066d502495b3025a0a7251bf37ea9540433940a23419461ab9f2a80"}, - {file = "regex-2024.5.15-cp312-cp312-win32.whl", hash = "sha256:c49e15eac7c149f3670b3e27f1f28a2c1ddeccd3a2812cba953e01be2ab9b5fe"}, - {file = "regex-2024.5.15-cp312-cp312-win_amd64.whl", hash = "sha256:673b5a6da4557b975c6c90198588181029c60793835ce02f497ea817ff647cb2"}, - {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:87e2a9c29e672fc65523fb47a90d429b70ef72b901b4e4b1bd42387caf0d6835"}, - {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3bea0ba8b73b71b37ac833a7f3fd53825924165da6a924aec78c13032f20850"}, - {file = "regex-2024.5.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bfc4f82cabe54f1e7f206fd3d30fda143f84a63fe7d64a81558d6e5f2e5aaba9"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5bb9425fe881d578aeca0b2b4b3d314ec88738706f66f219c194d67179337cb"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64c65783e96e563103d641760664125e91bd85d8e49566ee560ded4da0d3e704"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cf2430df4148b08fb4324b848672514b1385ae3807651f3567871f130a728cc3"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5397de3219a8b08ae9540c48f602996aa6b0b65d5a61683e233af8605c42b0f2"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:455705d34b4154a80ead722f4f185b04c4237e8e8e33f265cd0798d0e44825fa"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b2b6f1b3bb6f640c1a92be3bbfbcb18657b125b99ecf141fb3310b5282c7d4ed"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3ad070b823ca5890cab606c940522d05d3d22395d432f4aaaf9d5b1653e47ced"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5b5467acbfc153847d5adb21e21e29847bcb5870e65c94c9206d20eb4e99a384"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:e6662686aeb633ad65be2a42b4cb00178b3fbf7b91878f9446075c404ada552f"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:2b4c884767504c0e2401babe8b5b7aea9148680d2e157fa28f01529d1f7fcf67"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3cd7874d57f13bf70078f1ff02b8b0aa48d5b9ed25fc48547516c6aba36f5741"}, - {file = "regex-2024.5.15-cp38-cp38-win32.whl", hash = "sha256:e4682f5ba31f475d58884045c1a97a860a007d44938c4c0895f41d64481edbc9"}, - {file = "regex-2024.5.15-cp38-cp38-win_amd64.whl", hash = "sha256:d99ceffa25ac45d150e30bd9ed14ec6039f2aad0ffa6bb87a5936f5782fc1569"}, - {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13cdaf31bed30a1e1c2453ef6015aa0983e1366fad2667657dbcac7b02f67133"}, - {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cac27dcaa821ca271855a32188aa61d12decb6fe45ffe3e722401fe61e323cd1"}, - {file = "regex-2024.5.15-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:7dbe2467273b875ea2de38ded4eba86cbcbc9a1a6d0aa11dcf7bd2e67859c435"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64f18a9a3513a99c4bef0e3efd4c4a5b11228b48aa80743be822b71e132ae4f5"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d347a741ea871c2e278fde6c48f85136c96b8659b632fb57a7d1ce1872547600"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1878b8301ed011704aea4c806a3cadbd76f84dece1ec09cc9e4dc934cfa5d4da"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4babf07ad476aaf7830d77000874d7611704a7fcf68c9c2ad151f5d94ae4bfc4"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35cb514e137cb3488bce23352af3e12fb0dbedd1ee6e60da053c69fb1b29cc6c"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cdd09d47c0b2efee9378679f8510ee6955d329424c659ab3c5e3a6edea696294"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:72d7a99cd6b8f958e85fc6ca5b37c4303294954eac1376535b03c2a43eb72629"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a094801d379ab20c2135529948cb84d417a2169b9bdceda2a36f5f10977ebc16"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c0c18345010870e58238790a6779a1219b4d97bd2e77e1140e8ee5d14df071aa"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:16093f563098448ff6b1fa68170e4acbef94e6b6a4e25e10eae8598bb1694b5d"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e38a7d4e8f633a33b4c7350fbd8bad3b70bf81439ac67ac38916c4a86b465456"}, - {file = "regex-2024.5.15-cp39-cp39-win32.whl", hash = "sha256:71a455a3c584a88f654b64feccc1e25876066c4f5ef26cd6dd711308aa538694"}, - {file = "regex-2024.5.15-cp39-cp39-win_amd64.whl", hash = "sha256:cab12877a9bdafde5500206d1020a584355a97884dfd388af3699e9137bf7388"}, - {file = "regex-2024.5.15.tar.gz", hash = "sha256:d3ee02d9e5f482cc8309134a91eeaacbdd2261ba111b0fef3748eeb4913e6a2c"}, + {file = "regex-2024.4.16-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb83cc090eac63c006871fd24db5e30a1f282faa46328572661c0a24a2323a08"}, + {file = "regex-2024.4.16-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8c91e1763696c0eb66340c4df98623c2d4e77d0746b8f8f2bee2c6883fd1fe18"}, + {file = "regex-2024.4.16-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:10188fe732dec829c7acca7422cdd1bf57d853c7199d5a9e96bb4d40db239c73"}, + {file = "regex-2024.4.16-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:956b58d692f235cfbf5b4f3abd6d99bf102f161ccfe20d2fd0904f51c72c4c66"}, + {file = "regex-2024.4.16-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a70b51f55fd954d1f194271695821dd62054d949efd6368d8be64edd37f55c86"}, + {file = "regex-2024.4.16-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c02fcd2bf45162280613d2e4a1ca3ac558ff921ae4e308ecb307650d3a6ee51"}, + {file = "regex-2024.4.16-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ed75ea6892a56896d78f11006161eea52c45a14994794bcfa1654430984b22"}, + {file = "regex-2024.4.16-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:bd727ad276bb91928879f3aa6396c9a1d34e5e180dce40578421a691eeb77f47"}, + {file = "regex-2024.4.16-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7cbc5d9e8a1781e7be17da67b92580d6ce4dcef5819c1b1b89f49d9678cc278c"}, + {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:78fddb22b9ef810b63ef341c9fcf6455232d97cfe03938cbc29e2672c436670e"}, + {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:445ca8d3c5a01309633a0c9db57150312a181146315693273e35d936472df912"}, + {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:95399831a206211d6bc40224af1c635cb8790ddd5c7493e0bd03b85711076a53"}, + {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:7731728b6568fc286d86745f27f07266de49603a6fdc4d19c87e8c247be452af"}, + {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4facc913e10bdba42ec0aee76d029aedda628161a7ce4116b16680a0413f658a"}, + {file = "regex-2024.4.16-cp310-cp310-win32.whl", hash = "sha256:911742856ce98d879acbea33fcc03c1d8dc1106234c5e7d068932c945db209c0"}, + {file = "regex-2024.4.16-cp310-cp310-win_amd64.whl", hash = "sha256:e0a2df336d1135a0b3a67f3bbf78a75f69562c1199ed9935372b82215cddd6e2"}, + {file = "regex-2024.4.16-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1210365faba7c2150451eb78ec5687871c796b0f1fa701bfd2a4a25420482d26"}, + {file = "regex-2024.4.16-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9ab40412f8cd6f615bfedea40c8bf0407d41bf83b96f6fc9ff34976d6b7037fd"}, + {file = "regex-2024.4.16-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fd80d1280d473500d8086d104962a82d77bfbf2b118053824b7be28cd5a79ea5"}, + {file = "regex-2024.4.16-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bb966fdd9217e53abf824f437a5a2d643a38d4fd5fd0ca711b9da683d452969"}, + {file = "regex-2024.4.16-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:20b7a68444f536365af42a75ccecb7ab41a896a04acf58432db9e206f4e525d6"}, + {file = "regex-2024.4.16-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b74586dd0b039c62416034f811d7ee62810174bb70dffcca6439f5236249eb09"}, + {file = "regex-2024.4.16-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c8290b44d8b0af4e77048646c10c6e3aa583c1ca67f3b5ffb6e06cf0c6f0f89"}, + {file = "regex-2024.4.16-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2d80a6749724b37853ece57988b39c4e79d2b5fe2869a86e8aeae3bbeef9eb0"}, + {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3a1018e97aeb24e4f939afcd88211ace472ba566efc5bdf53fd8fd7f41fa7170"}, + {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8d015604ee6204e76569d2f44e5a210728fa917115bef0d102f4107e622b08d5"}, + {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:3d5ac5234fb5053850d79dd8eb1015cb0d7d9ed951fa37aa9e6249a19aa4f336"}, + {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:0a38d151e2cdd66d16dab550c22f9521ba79761423b87c01dae0a6e9add79c0d"}, + {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:159dc4e59a159cb8e4e8f8961eb1fa5d58f93cb1acd1701d8aff38d45e1a84a6"}, + {file = "regex-2024.4.16-cp311-cp311-win32.whl", hash = "sha256:ba2336d6548dee3117520545cfe44dc28a250aa091f8281d28804aa8d707d93d"}, + {file = 
"regex-2024.4.16-cp311-cp311-win_amd64.whl", hash = "sha256:8f83b6fd3dc3ba94d2b22717f9c8b8512354fd95221ac661784df2769ea9bba9"}, + {file = "regex-2024.4.16-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:80b696e8972b81edf0af2a259e1b2a4a661f818fae22e5fa4fa1a995fb4a40fd"}, + {file = "regex-2024.4.16-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d61ae114d2a2311f61d90c2ef1358518e8f05eafda76eaf9c772a077e0b465ec"}, + {file = "regex-2024.4.16-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8ba6745440b9a27336443b0c285d705ce73adb9ec90e2f2004c64d95ab5a7598"}, + {file = "regex-2024.4.16-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6295004b2dd37b0835ea5c14a33e00e8cfa3c4add4d587b77287825f3418d310"}, + {file = "regex-2024.4.16-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4aba818dcc7263852aabb172ec27b71d2abca02a593b95fa79351b2774eb1d2b"}, + {file = "regex-2024.4.16-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0800631e565c47520aaa04ae38b96abc5196fe8b4aa9bd864445bd2b5848a7a"}, + {file = "regex-2024.4.16-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08dea89f859c3df48a440dbdcd7b7155bc675f2fa2ec8c521d02dc69e877db70"}, + {file = "regex-2024.4.16-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eeaa0b5328b785abc344acc6241cffde50dc394a0644a968add75fcefe15b9d4"}, + {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4e819a806420bc010489f4e741b3036071aba209f2e0989d4750b08b12a9343f"}, + {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:c2d0e7cbb6341e830adcbfa2479fdeebbfbb328f11edd6b5675674e7a1e37730"}, + {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:91797b98f5e34b6a49f54be33f72e2fb658018ae532be2f79f7c63b4ae225145"}, + {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:d2da13568eff02b30fd54fccd1e042a70fe920d816616fda4bf54ec705668d81"}, + {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:370c68dc5570b394cbaadff50e64d705f64debed30573e5c313c360689b6aadc"}, + {file = "regex-2024.4.16-cp312-cp312-win32.whl", hash = "sha256:904c883cf10a975b02ab3478bce652f0f5346a2c28d0a8521d97bb23c323cc8b"}, + {file = "regex-2024.4.16-cp312-cp312-win_amd64.whl", hash = "sha256:785c071c982dce54d44ea0b79cd6dfafddeccdd98cfa5f7b86ef69b381b457d9"}, + {file = "regex-2024.4.16-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e2f142b45c6fed48166faeb4303b4b58c9fcd827da63f4cf0a123c3480ae11fb"}, + {file = "regex-2024.4.16-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e87ab229332ceb127a165612d839ab87795972102cb9830e5f12b8c9a5c1b508"}, + {file = "regex-2024.4.16-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:81500ed5af2090b4a9157a59dbc89873a25c33db1bb9a8cf123837dcc9765047"}, + {file = "regex-2024.4.16-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b340cccad138ecb363324aa26893963dcabb02bb25e440ebdf42e30963f1a4e0"}, + {file = "regex-2024.4.16-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c72608e70f053643437bd2be0608f7f1c46d4022e4104d76826f0839199347a"}, + {file = "regex-2024.4.16-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a01fe2305e6232ef3e8f40bfc0f0f3a04def9aab514910fa4203bafbc0bb4682"}, + {file = 
"regex-2024.4.16-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:03576e3a423d19dda13e55598f0fd507b5d660d42c51b02df4e0d97824fdcae3"}, + {file = "regex-2024.4.16-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:549c3584993772e25f02d0656ac48abdda73169fe347263948cf2b1cead622f3"}, + {file = "regex-2024.4.16-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:34422d5a69a60b7e9a07a690094e824b66f5ddc662a5fc600d65b7c174a05f04"}, + {file = "regex-2024.4.16-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:5f580c651a72b75c39e311343fe6875d6f58cf51c471a97f15a938d9fe4e0d37"}, + {file = "regex-2024.4.16-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3399dd8a7495bbb2bacd59b84840eef9057826c664472e86c91d675d007137f5"}, + {file = "regex-2024.4.16-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8d1f86f3f4e2388aa3310b50694ac44daefbd1681def26b4519bd050a398dc5a"}, + {file = "regex-2024.4.16-cp37-cp37m-win32.whl", hash = "sha256:dd5acc0a7d38fdc7a3a6fd3ad14c880819008ecb3379626e56b163165162cc46"}, + {file = "regex-2024.4.16-cp37-cp37m-win_amd64.whl", hash = "sha256:ba8122e3bb94ecda29a8de4cf889f600171424ea586847aa92c334772d200331"}, + {file = "regex-2024.4.16-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:743deffdf3b3481da32e8a96887e2aa945ec6685af1cfe2bcc292638c9ba2f48"}, + {file = "regex-2024.4.16-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7571f19f4a3fd00af9341c7801d1ad1967fc9c3f5e62402683047e7166b9f2b4"}, + {file = "regex-2024.4.16-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:df79012ebf6f4efb8d307b1328226aef24ca446b3ff8d0e30202d7ebcb977a8c"}, + {file = "regex-2024.4.16-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e757d475953269fbf4b441207bb7dbdd1c43180711b6208e129b637792ac0b93"}, + {file = "regex-2024.4.16-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4313ab9bf6a81206c8ac28fdfcddc0435299dc88cad12cc6305fd0e78b81f9e4"}, + {file = "regex-2024.4.16-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d83c2bc678453646f1a18f8db1e927a2d3f4935031b9ad8a76e56760461105dd"}, + {file = "regex-2024.4.16-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9df1bfef97db938469ef0a7354b2d591a2d438bc497b2c489471bec0e6baf7c4"}, + {file = "regex-2024.4.16-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62120ed0de69b3649cc68e2965376048793f466c5a6c4370fb27c16c1beac22d"}, + {file = "regex-2024.4.16-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c2ef6f7990b6e8758fe48ad08f7e2f66c8f11dc66e24093304b87cae9037bb4a"}, + {file = "regex-2024.4.16-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8fc6976a3395fe4d1fbeb984adaa8ec652a1e12f36b56ec8c236e5117b585427"}, + {file = "regex-2024.4.16-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:03e68f44340528111067cecf12721c3df4811c67268b897fbe695c95f860ac42"}, + {file = "regex-2024.4.16-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ec7e0043b91115f427998febaa2beb82c82df708168b35ece3accb610b91fac1"}, + {file = "regex-2024.4.16-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c21fc21a4c7480479d12fd8e679b699f744f76bb05f53a1d14182b31f55aac76"}, + {file = "regex-2024.4.16-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:12f6a3f2f58bb7344751919a1876ee1b976fe08b9ffccb4bbea66f26af6017b9"}, + {file = "regex-2024.4.16-cp38-cp38-win32.whl", hash = 
"sha256:479595a4fbe9ed8f8f72c59717e8cf222da2e4c07b6ae5b65411e6302af9708e"}, + {file = "regex-2024.4.16-cp38-cp38-win_amd64.whl", hash = "sha256:0534b034fba6101611968fae8e856c1698da97ce2efb5c2b895fc8b9e23a5834"}, + {file = "regex-2024.4.16-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a7ccdd1c4a3472a7533b0a7aa9ee34c9a2bef859ba86deec07aff2ad7e0c3b94"}, + {file = "regex-2024.4.16-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f2f017c5be19984fbbf55f8af6caba25e62c71293213f044da3ada7091a4455"}, + {file = "regex-2024.4.16-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:803b8905b52de78b173d3c1e83df0efb929621e7b7c5766c0843704d5332682f"}, + {file = "regex-2024.4.16-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:684008ec44ad275832a5a152f6e764bbe1914bea10968017b6feaecdad5736e0"}, + {file = "regex-2024.4.16-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65436dce9fdc0aeeb0a0effe0839cb3d6a05f45aa45a4d9f9c60989beca78b9c"}, + {file = "regex-2024.4.16-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea355eb43b11764cf799dda62c658c4d2fdb16af41f59bb1ccfec517b60bcb07"}, + {file = "regex-2024.4.16-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98c1165f3809ce7774f05cb74e5408cd3aa93ee8573ae959a97a53db3ca3180d"}, + {file = "regex-2024.4.16-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cccc79a9be9b64c881f18305a7c715ba199e471a3973faeb7ba84172abb3f317"}, + {file = "regex-2024.4.16-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00169caa125f35d1bca6045d65a662af0202704489fada95346cfa092ec23f39"}, + {file = "regex-2024.4.16-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6cc38067209354e16c5609b66285af17a2863a47585bcf75285cab33d4c3b8df"}, + {file = "regex-2024.4.16-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:23cff1b267038501b179ccbbd74a821ac4a7192a1852d1d558e562b507d46013"}, + {file = "regex-2024.4.16-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:b9d320b3bf82a39f248769fc7f188e00f93526cc0fe739cfa197868633d44701"}, + {file = "regex-2024.4.16-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:89ec7f2c08937421bbbb8b48c54096fa4f88347946d4747021ad85f1b3021b3c"}, + {file = "regex-2024.4.16-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4918fd5f8b43aa7ec031e0fef1ee02deb80b6afd49c85f0790be1dc4ce34cb50"}, + {file = "regex-2024.4.16-cp39-cp39-win32.whl", hash = "sha256:684e52023aec43bdf0250e843e1fdd6febbe831bd9d52da72333fa201aaa2335"}, + {file = "regex-2024.4.16-cp39-cp39-win_amd64.whl", hash = "sha256:e697e1c0238133589e00c244a8b676bc2cfc3ab4961318d902040d099fec7483"}, + {file = "regex-2024.4.16.tar.gz", hash = "sha256:fa454d26f2e87ad661c4f0c5a5fe4cf6aab1e307d1b94f16ffdfcb089ba685c0"}, ] [[package]] name = "requests" -version = "2.32.3" +version = "2.31.0" description = "Python HTTP for Humans." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, ] [package.dependencies] @@ -4614,28 +4642,28 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] [[package]] name = "ruff" -version = "0.4.8" +version = "0.4.1" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.4.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:7663a6d78f6adb0eab270fa9cf1ff2d28618ca3a652b60f2a234d92b9ec89066"}, - {file = "ruff-0.4.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eeceb78da8afb6de0ddada93112869852d04f1cd0f6b80fe464fd4e35c330913"}, - {file = "ruff-0.4.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aad360893e92486662ef3be0a339c5ca3c1b109e0134fcd37d534d4be9fb8de3"}, - {file = "ruff-0.4.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:284c2e3f3396fb05f5f803c9fffb53ebbe09a3ebe7dda2929ed8d73ded736deb"}, - {file = "ruff-0.4.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7354f921e3fbe04d2a62d46707e569f9315e1a613307f7311a935743c51a764"}, - {file = "ruff-0.4.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:72584676164e15a68a15778fd1b17c28a519e7a0622161eb2debdcdabdc71883"}, - {file = "ruff-0.4.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9678d5c9b43315f323af2233a04d747409d1e3aa6789620083a82d1066a35199"}, - {file = "ruff-0.4.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704977a658131651a22b5ebeb28b717ef42ac6ee3b11e91dc87b633b5d83142b"}, - {file = "ruff-0.4.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d05f8d6f0c3cce5026cecd83b7a143dcad503045857bc49662f736437380ad45"}, - {file = "ruff-0.4.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6ea874950daca5697309d976c9afba830d3bf0ed66887481d6bca1673fc5b66a"}, - {file = "ruff-0.4.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fc95aac2943ddf360376be9aa3107c8cf9640083940a8c5bd824be692d2216dc"}, - {file = "ruff-0.4.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:384154a1c3f4bf537bac69f33720957ee49ac8d484bfc91720cc94172026ceed"}, - {file = "ruff-0.4.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e9d5ce97cacc99878aa0d084c626a15cd21e6b3d53fd6f9112b7fc485918e1fa"}, - {file = "ruff-0.4.8-py3-none-win32.whl", hash = "sha256:6d795d7639212c2dfd01991259460101c22aabf420d9b943f153ab9d9706e6a9"}, - {file = "ruff-0.4.8-py3-none-win_amd64.whl", hash = "sha256:e14a3a095d07560a9d6769a72f781d73259655919d9b396c650fc98a8157555d"}, - {file = "ruff-0.4.8-py3-none-win_arm64.whl", hash = "sha256:14019a06dbe29b608f6b7cbcec300e3170a8d86efaddb7b23405cb7f7dcaf780"}, - {file = "ruff-0.4.8.tar.gz", hash = "sha256:16d717b1d57b2e2fd68bd0bf80fb43931b79d05a7131aa477d66fc40fbd86268"}, + {file = "ruff-0.4.1-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:2d9ef6231e3fbdc0b8c72404a1a0c46fd0dcea84efca83beb4681c318ea6a953"}, + {file = 
"ruff-0.4.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9485f54a7189e6f7433e0058cf8581bee45c31a25cd69009d2a040d1bd4bfaef"}, + {file = "ruff-0.4.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2921ac03ce1383e360e8a95442ffb0d757a6a7ddd9a5be68561a671e0e5807e"}, + {file = "ruff-0.4.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eec8d185fe193ad053eda3a6be23069e0c8ba8c5d20bc5ace6e3b9e37d246d3f"}, + {file = "ruff-0.4.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:baa27d9d72a94574d250f42b7640b3bd2edc4c58ac8ac2778a8c82374bb27984"}, + {file = "ruff-0.4.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f1ee41580bff1a651339eb3337c20c12f4037f6110a36ae4a2d864c52e5ef954"}, + {file = "ruff-0.4.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0926cefb57fc5fced629603fbd1a23d458b25418681d96823992ba975f050c2b"}, + {file = "ruff-0.4.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c6e37f2e3cd74496a74af9a4fa67b547ab3ca137688c484749189bf3a686ceb"}, + {file = "ruff-0.4.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efd703a5975ac1998c2cc5e9494e13b28f31e66c616b0a76e206de2562e0843c"}, + {file = "ruff-0.4.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b92f03b4aa9fa23e1799b40f15f8b95cdc418782a567d6c43def65e1bbb7f1cf"}, + {file = "ruff-0.4.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1c859f294f8633889e7d77de228b203eb0e9a03071b72b5989d89a0cf98ee262"}, + {file = "ruff-0.4.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b34510141e393519a47f2d7b8216fec747ea1f2c81e85f076e9f2910588d4b64"}, + {file = "ruff-0.4.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6e68d248ed688b9d69fd4d18737edcbb79c98b251bba5a2b031ce2470224bdf9"}, + {file = "ruff-0.4.1-py3-none-win32.whl", hash = "sha256:b90506f3d6d1f41f43f9b7b5ff845aeefabed6d2494307bc7b178360a8805252"}, + {file = "ruff-0.4.1-py3-none-win_amd64.whl", hash = "sha256:c7d391e5936af5c9e252743d767c564670dc3889aff460d35c518ee76e4b26d7"}, + {file = "ruff-0.4.1-py3-none-win_arm64.whl", hash = "sha256:a1eaf03d87e6a7cd5e661d36d8c6e874693cb9bc3049d110bc9a97b350680c43"}, + {file = "ruff-0.4.1.tar.gz", hash = "sha256:d592116cdbb65f8b1b7e2a2b48297eb865f6bdc20641879aa9d7b9c11d86db79"}, ] [[package]] @@ -4650,18 +4678,19 @@ files = [ [[package]] name = "setuptools" -version = "70.0.0" +version = "69.5.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = true python-versions = ">=3.8" files = [ - {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, - {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, + {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, + {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop 
(>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "singledispatch" @@ -4691,17 +4720,17 @@ files = [ [[package]] name = "slack-sdk" -version = "3.28.0" +version = "3.27.1" description = "The Slack API Platform SDK for Python" optional = false python-versions = ">=3.6" files = [ - {file = "slack_sdk-3.28.0-py2.py3-none-any.whl", hash = "sha256:1a47700ae20566575ce494d1d1b6f594b011d06aad28e3b8e28c052cad1d6c4c"}, - {file = "slack_sdk-3.28.0.tar.gz", hash = "sha256:e6ece5cb70850492637e002e3b0d26d307939f4a33203b88cb274f7475c9a144"}, + {file = "slack_sdk-3.27.1-py2.py3-none-any.whl", hash = "sha256:c108e509160cf1324c5c8b1f47ca52fb5e287021b8caf9f4ec78ad737ab7b1d9"}, + {file = "slack_sdk-3.27.1.tar.gz", hash = "sha256:85d86b34d807c26c8bb33c1569ec0985876f06ae4a2692afba765b7a5490d28c"}, ] [package.extras] -optional = ["SQLAlchemy (>=1.4,<3)", "aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "websocket-client (>=1,<2)", "websockets (>=9.1,<13)"] +optional = ["SQLAlchemy (>=1.4,<3)", "aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "websocket-client (>=1,<2)", "websockets (>=10,<11)", "websockets (>=9.1,<10)"] [[package]] name = "smmap" @@ -4738,13 +4767,13 @@ files = [ [[package]] name = "social-auth-app-django" -version = "5.4.1" +version = "5.4.0" description = "Python Social Authentication, Django integration." 
optional = false python-versions = ">=3.8" files = [ - {file = "social-auth-app-django-5.4.1.tar.gz", hash = "sha256:2a43cde559dd34fdc7132417b6c52c780fa99ec2332dee9f405b4763f371c367"}, - {file = "social_auth_app_django-5.4.1-py3-none-any.whl", hash = "sha256:7519f186c63c50f2d364457b236f051338d194bcface55e318a6a705c5213477"}, + {file = "social-auth-app-django-5.4.0.tar.gz", hash = "sha256:09ac02a063cb313eed5e9ef2f9ac4477c8bf5bbd685925ff3aba43f9072f1bbb"}, + {file = "social_auth_app_django-5.4.0-py3-none-any.whl", hash = "sha256:28c65b2e2092f30cdb3cf912eeaa6988b49fdf4001b29bd89e683673d700a38e"}, ] [package.dependencies] @@ -4753,13 +4782,13 @@ social-auth-core = ">=4.4.1" [[package]] name = "social-auth-core" -version = "4.5.4" +version = "4.5.3" description = "Python social authentication made simple." optional = false python-versions = ">=3.8" files = [ - {file = "social-auth-core-4.5.4.tar.gz", hash = "sha256:d3dbeb0999ffd0e68aa4bd73f2ac698a18133fd11b3fc890e1366f18c8889fac"}, - {file = "social_auth_core-4.5.4-py3-none-any.whl", hash = "sha256:33cf970a623c442376f9d4a86fb187579e4438649daa5b5be993d05e74d7b2db"}, + {file = "social-auth-core-4.5.3.tar.gz", hash = "sha256:9d9b51b7ce2ccd0b7139e6b7f52a32cb922726de819fb13babe35f12ae89852a"}, + {file = "social_auth_core-4.5.3-py3-none-any.whl", hash = "sha256:8d16e66eb97bb7be43a023d6efa16628cdc94cefd8d8053930c98a0f676867e7"}, ] [package.dependencies] @@ -5037,13 +5066,13 @@ files = [ [[package]] name = "tomlkit" -version = "0.12.5" +version = "0.12.4" description = "Style preserving TOML library" optional = false python-versions = ">=3.7" files = [ - {file = "tomlkit-0.12.5-py3-none-any.whl", hash = "sha256:af914f5a9c59ed9d0762c7b64d3b5d5df007448eb9cd2edc8a46b1eafead172f"}, - {file = "tomlkit-0.12.5.tar.gz", hash = "sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c"}, + {file = "tomlkit-0.12.4-py3-none-any.whl", hash = "sha256:5cd82d48a3dd89dee1f9d64420aa20ae65cfbd00668d6f094d7578a78efbb77b"}, + {file = "tomlkit-0.12.4.tar.gz", hash = "sha256:7ca1cfc12232806517a8515047ba66a19369e71edf2439d0f5824f91032b6cc3"}, ] [[package]] @@ -5107,13 +5136,13 @@ files = [ [[package]] name = "types-requests" -version = "2.32.0.20240602" +version = "2.31.0.20240406" description = "Typing stubs for requests" optional = true python-versions = ">=3.8" files = [ - {file = "types-requests-2.32.0.20240602.tar.gz", hash = "sha256:3f98d7bbd0dd94ebd10ff43a7fbe20c3b8528acace6d8efafef0b6a184793f06"}, - {file = "types_requests-2.32.0.20240602-py3-none-any.whl", hash = "sha256:ed3946063ea9fbc6b5fc0c44fa279188bae42d582cb63760be6cb4b9d06c3de8"}, + {file = "types-requests-2.31.0.20240406.tar.gz", hash = "sha256:4428df33c5503945c74b3f42e82b181e86ec7b724620419a2966e2de604ce1a1"}, + {file = "types_requests-2.31.0.20240406-py3-none-any.whl", hash = "sha256:6216cdac377c6b9a040ac1c0404f7284bd13199c0e1bb235f4324627e8898cf5"}, ] [package.dependencies] @@ -5132,13 +5161,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.12.2" +version = "4.11.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = 
"sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, ] [[package]] @@ -5193,43 +5222,40 @@ files = [ [[package]] name = "watchdog" -version = "4.0.1" +version = "4.0.0" description = "Filesystem events monitoring" optional = false python-versions = ">=3.8" files = [ - {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:da2dfdaa8006eb6a71051795856bedd97e5b03e57da96f98e375682c48850645"}, - {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e93f451f2dfa433d97765ca2634628b789b49ba8b504fdde5837cdcf25fdb53b"}, - {file = "watchdog-4.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ef0107bbb6a55f5be727cfc2ef945d5676b97bffb8425650dadbb184be9f9a2b"}, - {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:17e32f147d8bf9657e0922c0940bcde863b894cd871dbb694beb6704cfbd2fb5"}, - {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03e70d2df2258fb6cb0e95bbdbe06c16e608af94a3ffbd2b90c3f1e83eb10767"}, - {file = "watchdog-4.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:123587af84260c991dc5f62a6e7ef3d1c57dfddc99faacee508c71d287248459"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:093b23e6906a8b97051191a4a0c73a77ecc958121d42346274c6af6520dec175"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:611be3904f9843f0529c35a3ff3fd617449463cb4b73b1633950b3d97fa4bfb7"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62c613ad689ddcb11707f030e722fa929f322ef7e4f18f5335d2b73c61a85c28"}, - {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d4925e4bf7b9bddd1c3de13c9b8a2cdb89a468f640e66fbfabaf735bd85b3e35"}, - {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cad0bbd66cd59fc474b4a4376bc5ac3fc698723510cbb64091c2a793b18654db"}, - {file = "watchdog-4.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a3c2c317a8fb53e5b3d25790553796105501a235343f5d2bf23bb8649c2c8709"}, - {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c9904904b6564d4ee8a1ed820db76185a3c96e05560c776c79a6ce5ab71888ba"}, - {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:667f3c579e813fcbad1b784db7a1aaa96524bed53437e119f6a2f5de4db04235"}, - {file = "watchdog-4.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d10a681c9a1d5a77e75c48a3b8e1a9f2ae2928eda463e8d33660437705659682"}, - {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0144c0ea9997b92615af1d94afc0c217e07ce2c14912c7b1a5731776329fcfc7"}, - {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:998d2be6976a0ee3a81fb8e2777900c28641fb5bfbd0c84717d89bca0addcdc5"}, - {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e7921319fe4430b11278d924ef66d4daa469fafb1da679a2e48c935fa27af193"}, - {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f0de0f284248ab40188f23380b03b59126d1479cd59940f2a34f8852db710625"}, - {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bca36be5707e81b9e6ce3208d92d95540d4ca244c006b61511753583c81c70dd"}, - {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ab998f567ebdf6b1da7dc1e5accfaa7c6992244629c0fdaef062f43249bd8dee"}, - {file = 
"watchdog-4.0.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dddba7ca1c807045323b6af4ff80f5ddc4d654c8bce8317dde1bd96b128ed253"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:4513ec234c68b14d4161440e07f995f231be21a09329051e67a2118a7a612d2d"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_i686.whl", hash = "sha256:4107ac5ab936a63952dea2a46a734a23230aa2f6f9db1291bf171dac3ebd53c6"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:6e8c70d2cd745daec2a08734d9f63092b793ad97612470a0ee4cbb8f5f705c57"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f27279d060e2ab24c0aa98363ff906d2386aa6c4dc2f1a374655d4e02a6c5e5e"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:f8affdf3c0f0466e69f5b3917cdd042f89c8c63aebdb9f7c078996f607cdb0f5"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ac7041b385f04c047fcc2951dc001671dee1b7e0615cde772e84b01fbf68ee84"}, - {file = "watchdog-4.0.1-py3-none-win32.whl", hash = "sha256:206afc3d964f9a233e6ad34618ec60b9837d0582b500b63687e34011e15bb429"}, - {file = "watchdog-4.0.1-py3-none-win_amd64.whl", hash = "sha256:7577b3c43e5909623149f76b099ac49a1a01ca4e167d1785c76eb52fa585745a"}, - {file = "watchdog-4.0.1-py3-none-win_ia64.whl", hash = "sha256:d7b9f5f3299e8dd230880b6c55504a1f69cf1e4316275d1b215ebdd8187ec88d"}, - {file = "watchdog-4.0.1.tar.gz", hash = "sha256:eebaacf674fa25511e8867028d281e602ee6500045b57f43b08778082f7f8b44"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, + {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, + {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, + {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, + {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, + {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, + {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, + {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, ] [package.extras] @@ -5400,18 +5426,18 @@ multidict = ">=4.0" [[package]] name = "zipp" -version = "3.19.2" +version = "3.18.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, - {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, ] [package.extras] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", 
"more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [extras] aci = ["PyYAML"] From 51c0cb83f7866b2670aefa3606249d6d6ce31dc8 Mon Sep 17 00:00:00 2001 From: James Williams Date: Tue, 18 Jun 2024 13:08:33 -0500 Subject: [PATCH 208/229] resolve testing issues --- nautobot_ssot/tests/itential/fixtures/devices.py | 2 +- nautobot_ssot/tests/itential/fixtures/gateways.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nautobot_ssot/tests/itential/fixtures/devices.py b/nautobot_ssot/tests/itential/fixtures/devices.py index f46bd8c9a..fc007666b 100644 --- a/nautobot_ssot/tests/itential/fixtures/devices.py +++ b/nautobot_ssot/tests/itential/fixtures/devices.py @@ -91,7 +91,7 @@ def update_or_create_device_object( config_context: dict = {}, ): # pylint: disable=dangerous-default-value,too-many-arguments,too-many-locals """Create or update device fixtures.""" - status = Status.objects.get(name=status) + status, _ = Status.objects.get_or_create(name=status) namespace, _ = Namespace.objects.get_or_create(name="Global") Prefix.objects.update_or_create(prefix="192.0.2.0/24", namespace=namespace, status=status) device_content_type = ContentType.objects.get_for_model(Device) diff --git a/nautobot_ssot/tests/itential/fixtures/gateways.py b/nautobot_ssot/tests/itential/fixtures/gateways.py index 483805447..78926179c 100644 --- a/nautobot_ssot/tests/itential/fixtures/gateways.py +++ b/nautobot_ssot/tests/itential/fixtures/gateways.py @@ -163,7 +163,7 @@ def update_or_create_automation_gateways( ): # pylint: disable=too-many-arguments,too-many-locals """Fixture to populate Automation Gateways.""" # Fetch the active status - status = Status.objects.get(name="Active") + status, _ = Status.objects.get_or_create(name="Active") # Create a region location type location_type, _ = LocationType.objects.update_or_create(name="Region") From 18ce97d63973f48234de80492ca8e3c9795acb86 Mon Sep 17 00:00:00 2001 From: James Williams Date: Fri, 21 Jun 2024 13:07:58 -0500 Subject: [PATCH 209/229] Update docs/admin/integrations/itential_setup.md Co-authored-by: Justin Drew <2396364+jdrew82@users.noreply.github.com> --- docs/admin/integrations/itential_setup.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/admin/integrations/itential_setup.md b/docs/admin/integrations/itential_setup.md index 286e3a124..70b6c8123 100644 --- a/docs/admin/integrations/itential_setup.md +++ b/docs/admin/integrations/itential_setup.md @@ -36,7 +36,7 @@ When assigning secrets to a secrets group, please refer to the table below to co | Secret Description | Access Type | Secret Type | +-----------------------+-------------+-------------+ -| Itnetial API username | REST | Username | +| Itential API username | REST | Username | | Itential API password | REST | Password | | Device username | GENERIC | Username | | Device password | GENERIC | Password | From 00439ad53d8de7a9f5571ed37c9294247889673e Mon Sep 17 00:00:00 2001 From: James Williams Date: Fri, 21 Jun 2024 14:47:36 -0500 Subject: [PATCH 210/229] Add user documentation --- docs/admin/integrations/itential_setup.md | 2 +- docs/user/integrations/index.md | 1 + docs/user/integrations/itential.md | 38 +++++++++++++++++++++++ 3 files changed, 40 insertions(+), 1 deletion(-) create mode 100644 docs/user/integrations/itential.md diff --git a/docs/admin/integrations/itential_setup.md b/docs/admin/integrations/itential_setup.md index 70b6c8123..ae1479ce7 
100644
--- a/docs/admin/integrations/itential_setup.md
+++ b/docs/admin/integrations/itential_setup.md
@@ -35,7 +35,7 @@ The Itential integration necessitates four secret values: (1) Itential API acces
 When assigning secrets to a secrets group, please refer to the table below to correctly assign each secret to its respective access type and secret type.
 
 | Secret Description | Access Type | Secret Type |
-+-----------------------+-------------+-------------+
+|-----------------------|-------------|-------------|
 | Itential API username | REST | Username |
 | Itential API password | REST | Password |
 | Device username | GENERIC | Username |
diff --git a/docs/user/integrations/index.md b/docs/user/integrations/index.md
index b3b1869e0..bb5b03b2e 100644
--- a/docs/user/integrations/index.md
+++ b/docs/user/integrations/index.md
@@ -7,4 +7,5 @@ This Nautobot app supports the following integrations:
 - [Device42](./device42.md)
 - [Infoblox](./infoblox.md)
 - [IPFabric](./ipfabric.md)
+- [Itential](./itential.md)
 - [ServiceNow](./servicenow.md)
diff --git a/docs/user/integrations/itential.md b/docs/user/integrations/itential.md
new file mode 100644
index 000000000..2580b76e7
--- /dev/null
+++ b/docs/user/integrations/itential.md
@@ -0,0 +1,38 @@
+## Itential Automation Gateway SSoT Integration
+
+The Itential SSoT integration is built as part of the Nautobot Single Source of Truth (SSoT) app. This app enables Nautobot to serve as the aggregation point for inventories in the Itential Automation Gateway (IAG).
+
+## IAG Inventory
+
+The IAG can communicate with network devices using several methods. Each of these communication methods requires a separate inventory in the IAG database. Currently, only the Ansible/Scripts inventory is supported. Support for other inventories may be added at a later date.
+
+Below is a table displaying the IAG communication methods.
+
+| Communication Method |
+| ---------------------|
+| Ansible |
+| GRPC |
+| HTTP Requests |
+| NETCONF |
+| Netmiko |
+| Nornir |
+| Scripts |
+| Terraform |
+
+As a side note, you can create your own scripts or Ansible playbooks that utilize communication methods such as GRPC, HTTP, NETCONF, etc. Itential will use the default Ansible inventory to perform tasks on remote devices with these communication methods. However, if you want the Itential Automation Platform (IAP) to communicate with a remote device using a communication method that is not Ansible or scripts, it requires a separate inventory and iteration by the SSoT to support those inventories.
+
+## Nautobot to Itential Automation Gateway Modeling
+
+### Device Modeling
+
+Currently, the Itential SSoT integration supports only a one-way sync from Nautobot to IAG devices. For a device object to be synced to an IAG host, certain data is required in Nautobot:
+
+1. The device must have an [RFC 1123](https://www.rfc-editor.org/rfc/rfc1123) compliant hostname. The IAG will respond with an HTTP error for non-compliant hostnames.
+2. The device must have a management IP address assigned in Nautobot. This management IP address is used to assign the `ansible_host` variable in the IAG inventory.
+3. For Ansible to determine how to communicate with a remote device, the device needs to be assigned to a platform in Nautobot. The platform must have an appropriate network driver assigned. The Itential SSoT integration will use this network driver to determine how to assign the `ansible_network_os` Ansible variable in the IAG inventory. A sketch of the resulting inventory entry is shown below.
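+
+As a minimal, hypothetical sketch (the exact layout of the IAG inventory is defined by the IAG API, and the device name, IP address, and platform values below are illustrative only), a device meeting the requirements above could be rendered in a YAML Ansible inventory as:
+
+```yaml
+# Hypothetical inventory entry for a synced Nautobot device (illustrative values)
+all:
+  hosts:
+    core-router-01:                       # RFC 1123 compliant device name from Nautobot
+      ansible_host: 192.0.2.10            # management IP address assigned in Nautobot
+      ansible_network_os: cisco.ios.ios   # derived from the platform's network driver in Nautobot
+```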
+ +Additional device variables can be assigned to IAG inventories by utilizing Nautobot config contexts. Config contexts will be added to the IAG Ansible inventory as a one-to-one mapping. These config contexts can also be used to override variables for a device, such as the `ansible_network_os`. + +### Ansible Default Group Modeling + +Ansible uses a default group called `all` to define variables that are common across all devices. More specific groups and devices can override the variables defined in the `all` group. The Itential SSoT integration uses the `all` group to define the `ansible_username` and `ansible_password` variables, which IAG uses to communicate with remote devices. The Itential SSoT integration consumes the device secrets defined in the Itential Setup in the admin section of this documentation. From 7914802a371b7ddc6b49fd1c30dd5f2940712a24 Mon Sep 17 00:00:00 2001 From: James Williams Date: Wed, 26 Jun 2024 12:33:22 -0500 Subject: [PATCH 211/229] update docs --- docs/user/integrations/itential.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/user/integrations/itential.md b/docs/user/integrations/itential.md index 2580b76e7..c948228fa 100644 --- a/docs/user/integrations/itential.md +++ b/docs/user/integrations/itential.md @@ -19,7 +19,7 @@ Below is a table displaying the IAG communication methods. | Scripts | | Terraform | -As a side note, you can create your own scripts or Ansible playbooks that utilize communication methods such as GRPC, HTTP, NETCONF, etc. Itential will use the default Ansible inventory to perform tasks on remote devices with these communication methods. However, if you want the Itential Automation Platform (IAP) to communicate with a remote device using a communication method that is not Ansible or scripts, it requires a separate inventory and iteration by the SSoT to support those inventories. +As a side note, you can create your own scripts or Ansible playbooks that utilize communication methods such as GRPC, HTTP, NETCONF, etc. Itential will use the default Ansible inventory to perform tasks on remote devices with these communication methods. However, if you want the Itential Automation Platform (IAP) to communicate with a remote device using a communication method that is not Ansible or scripts, it requires a separate inventory and iteration by the SSoT App to support those inventories. 
## Nautobot to Itential Automation Gateway Modeling From ee2e9b12cc51032aaa8b79da6cb4c38fb67f10a7 Mon Sep 17 00:00:00 2001 From: James Williams Date: Wed, 26 Jun 2024 12:39:41 -0500 Subject: [PATCH 212/229] resolve utils --- nautobot_ssot/integrations/utils.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/nautobot_ssot/integrations/utils.py b/nautobot_ssot/integrations/utils.py index 30103f755..810e959ae 100644 --- a/nautobot_ssot/integrations/utils.py +++ b/nautobot_ssot/integrations/utils.py @@ -23,10 +23,7 @@ def each_enabled_integration_module(module_name: str) -> Generator[ModuleType, N """For each enabled integration, import the module name.""" for name in each_enabled_integration(): try: - if api: - module = import_module(f"nautobot_ssot.integrations.{name}.api.{module_name}") - else: - module = import_module(f"nautobot_ssot.integrations.{name}.{module_name}") + module = import_module(f"nautobot_ssot.integrations.{name}.{module_name}") except ModuleNotFoundError: logger.debug("Integration %s does not have a %s module, skipping.", name, module_name) continue From b444bf394f65fbb6ab85e19bc2750cdbb98acb2f Mon Sep 17 00:00:00 2001 From: James Williams Date: Wed, 26 Jun 2024 14:10:14 -0500 Subject: [PATCH 213/229] update itential model migrations --- ...ationgatewaymodel.py => 0010_automationgatewaymodel.py} | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) rename nautobot_ssot/migrations/{0009_automationgatewaymodel.py => 0010_automationgatewaymodel.py} (95%) diff --git a/nautobot_ssot/migrations/0009_automationgatewaymodel.py b/nautobot_ssot/migrations/0010_automationgatewaymodel.py similarity index 95% rename from nautobot_ssot/migrations/0009_automationgatewaymodel.py rename to nautobot_ssot/migrations/0010_automationgatewaymodel.py index c6e64e9d8..d7f411d45 100644 --- a/nautobot_ssot/migrations/0009_automationgatewaymodel.py +++ b/nautobot_ssot/migrations/0010_automationgatewaymodel.py @@ -1,4 +1,4 @@ -# Generated by Django 3.2.23 on 2024-04-14 02:48 +# Generated by Django 3.2.23 on 2024-06-26 19:01 import django.core.serializers.json from django.db import migrations, models @@ -9,10 +9,11 @@ class Migration(migrations.Migration): + dependencies = [ - ("dcim", "0052_fix_interface_redundancy_group_created"), ("extras", "0102_set_null_objectchange_contenttype"), - ("nautobot_ssot", "0008_auto_20240110_1019"), + ("dcim", "0052_fix_interface_redundancy_group_created"), + ("nautobot_ssot", "0009_ssotconfig_ssotinfobloxconfig"), ] operations = [ From 45e23f1fc5ad69c259deef1555c17d103c0f7218 Mon Sep 17 00:00:00 2001 From: Ken Celenza Date: Sat, 4 May 2024 13:59:42 -0400 Subject: [PATCH 214/229] Fixed link from list view to filtered sync log view by changing filter query to `sync` from overview. --- changes/437.fixed | 1 + nautobot_ssot/tables.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changes/437.fixed diff --git a/changes/437.fixed b/changes/437.fixed new file mode 100644 index 000000000..7b754958f --- /dev/null +++ b/changes/437.fixed @@ -0,0 +1 @@ +Fixed link from list view to filtered sync log view by changing filter query to `sync` from overview. 
\ No newline at end of file diff --git a/nautobot_ssot/tables.py b/nautobot_ssot/tables.py index eb953259f..38cfde649 100644 --- a/nautobot_ssot/tables.py +++ b/nautobot_ssot/tables.py @@ -10,7 +10,7 @@ ACTION_LOGS_LINK = """ + href="{% url 'plugins:nautobot_ssot:synclogentry_list' %}?sync={{ record.id }}&action={{ action }}"> {{ value }} """ @@ -18,7 +18,7 @@ STATUS_LOGS_LINK = """ + href="{% url 'plugins:nautobot_ssot:synclogentry_list' %}?sync={{ record.id }}&status={{ status }}"> {{ value }} """ From 483c3b8bd01ef79279ce133446c1a08a2db2c11d Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Wed, 10 Jul 2024 11:10:24 -0500 Subject: [PATCH 215/229] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Change=20string?= =?UTF-8?q?=20to=20be=20self.device=5Ftype.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/integrations/aci/diffsync/adapters/aci.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/integrations/aci/diffsync/adapters/aci.py b/nautobot_ssot/integrations/aci/diffsync/adapters/aci.py index 9ea0ae0ad..04a518601 100644 --- a/nautobot_ssot/integrations/aci/diffsync/adapters/aci.py +++ b/nautobot_ssot/integrations/aci/diffsync/adapters/aci.py @@ -410,7 +410,7 @@ def load_devices(self): if not model: self.get_or_instantiate( - "device_type", + self.device_type, ids={"model": value["model"], "part_nbr": ""}, attrs={"manufacturer": "Cisco", "u_height": 1, "comments": ""}, ) From d0936c5473698d0f70cefd71388955ae220fe718 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Wed, 10 Jul 2024 11:11:17 -0500 Subject: [PATCH 216/229] =?UTF-8?q?docs:=20=F0=9F=93=9D=20Add=20changelog?= =?UTF-8?q?=20fragment?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- changes/479.fixed | 1 + 1 file changed, 1 insertion(+) create mode 100644 changes/479.fixed diff --git a/changes/479.fixed b/changes/479.fixed new file mode 100644 index 000000000..a6031e9e8 --- /dev/null +++ b/changes/479.fixed @@ -0,0 +1 @@ +Correct get_or_instantiate() to use self.device_type instead of "device_type" in ACI adapter. 
\ No newline at end of file From f88858abbc03b358724b3eb8086e2403f2b212a9 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Wed, 10 Jul 2024 11:40:23 -0500 Subject: [PATCH 217/229] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Refactor=20code?= =?UTF-8?q?=20to=20handle=20possibility=20of=20device=5Fspecs=20being=20un?= =?UTF-8?q?defined.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integrations/aci/diffsync/adapters/aci.py | 91 ++++++++++--------- 1 file changed, 46 insertions(+), 45 deletions(-) diff --git a/nautobot_ssot/integrations/aci/diffsync/adapters/aci.py b/nautobot_ssot/integrations/aci/diffsync/adapters/aci.py index 04a518601..d7ccf8abc 100644 --- a/nautobot_ssot/integrations/aci/diffsync/adapters/aci.py +++ b/nautobot_ssot/integrations/aci/diffsync/adapters/aci.py @@ -335,57 +335,58 @@ def load_interfaces(self): fn = os.path.join(devicetype_file_path, f"{device['model']}.yaml") if os.path.exists(fn): device_specs = load_yamlfile(fn) - for interface_name, interface in interfaces[device_name].items(): - if_list = [ - intf - for intf in device_specs["interfaces"] - if intf["name"] == interface_name.replace("eth", "Ethernet") - ] - if if_list: - intf_type = if_list[0]["type"] - else: - intf_type = "other" - new_interface = self.interface( - name=interface_name.replace("eth", "Ethernet"), - device=device["name"], - site=self.site, - description=interface["descr"], - gbic_vendor=interface["gbic_vendor"], - gbic_type=interface["gbic_type"], - gbic_sn=interface["gbic_sn"], - gbic_model=interface["gbic_model"], - state=interface["state"], - type=intf_type, - site_tag=self.site, - ) - self.add(new_interface) + if device_specs and device_specs.get("interfaces"): + for interface_name, interface in interfaces[device_name].items(): + if_list = [ + intf + for intf in device_specs["interfaces"] + if intf["name"] == interface_name.replace("eth", "Ethernet") + ] + if if_list: + intf_type = if_list[0]["type"] + else: + intf_type = "other" + new_interface = self.interface( + name=interface_name.replace("eth", "Ethernet"), + device=device["name"], + site=self.site, + description=interface["descr"], + gbic_vendor=interface["gbic_vendor"], + gbic_type=interface["gbic_type"], + gbic_sn=interface["gbic_sn"], + gbic_model=interface["gbic_model"], + state=interface["state"], + type=intf_type, + site_tag=self.site, + ) + self.add(new_interface) + for _interface in device_specs["interfaces"]: + if_list = [intf for intf in device_specs["interfaces"] if intf["name"] == _interface] + if if_list: + intf_type = if_list[0]["type"] + else: + intf_type = "other" + if re.match("^Eth[0-9]|^mgmt[0-9]", _interface["name"]): + new_interface = self.interface( + name=_interface["name"], + device=device["name"], + site=self.site, + description="", + gbic_vendor="", + gbic_type="", + gbic_sn="", + gbic_model="", + state="up", + type=intf_type, + site_tag=self.site, + ) + self.add(new_interface) else: logger.warning( "No YAML file exists in device-types for model %s, skipping interface creation", device["model"], ) - for _interface in device_specs["interfaces"]: - if_list = [intf for intf in device_specs["interfaces"] if intf["name"] == _interface] - if if_list: - intf_type = if_list[0]["type"] - else: - intf_type = "other" - if re.match("^Eth[0-9]|^mgmt[0-9]", _interface["name"]): - new_interface = self.interface( - name=_interface["name"], - device=device["name"], - site=self.site, - description="", - gbic_vendor="", - gbic_type="", - 
gbic_sn="", - gbic_model="", - state="up", - type=intf_type, - site_tag=self.site, - ) - self.add(new_interface) def load_deviceroles(self): """Load device roles from ACI device data.""" From 384a65727cbca1bf739e11a6741eded0d764ddc3 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Wed, 10 Jul 2024 11:41:04 -0500 Subject: [PATCH 218/229] =?UTF-8?q?docs:=20=F0=9F=93=9D=20Update=20changel?= =?UTF-8?q?og=20snippet?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- changes/479.fixed | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/changes/479.fixed b/changes/479.fixed index a6031e9e8..518c7e61f 100644 --- a/changes/479.fixed +++ b/changes/479.fixed @@ -1 +1,2 @@ -Correct get_or_instantiate() to use self.device_type instead of "device_type" in ACI adapter. \ No newline at end of file +Correct get_or_instantiate() to use self.device_type instead of "device_type" in ACI adapter. +Refactor load_interfaces() to have check for device_specs var being defined in case file isn't loaded. \ No newline at end of file From ef8d55ab63e0f3918e26ed2e478f37ff26b2bb90 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Wed, 10 Jul 2024 13:21:02 -0500 Subject: [PATCH 219/229] =?UTF-8?q?style:=20=F0=9F=9A=A8=20Fix=20formattin?= =?UTF-8?q?g=20for=20black.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/integrations/aci/diffsync/adapters/aci.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nautobot_ssot/integrations/aci/diffsync/adapters/aci.py b/nautobot_ssot/integrations/aci/diffsync/adapters/aci.py index d7ccf8abc..cc25aa1b1 100644 --- a/nautobot_ssot/integrations/aci/diffsync/adapters/aci.py +++ b/nautobot_ssot/integrations/aci/diffsync/adapters/aci.py @@ -387,7 +387,6 @@ def load_interfaces(self): device["model"], ) - def load_deviceroles(self): """Load device roles from ACI device data.""" device_roles = {value["role"] for value in self.devices.values()} From 5c923191ee83c224c7d8e4793fea7387bf8e38a6 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 16 Jul 2024 16:32:57 -0500 Subject: [PATCH 220/229] =?UTF-8?q?build:=20=F0=9F=8F=97=EF=B8=8F=20Bump?= =?UTF-8?q?=20to=202.7.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 0dc80ee28..a60e824be 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nautobot-ssot" -version = "2.6.1" +version = "2.7.0" description = "Nautobot Single Source of Truth" authors = ["Network to Code, LLC "] license = "Apache-2.0" From eeb4b8efb777c2818e2c284b44ebacac161388ff Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 16 Jul 2024 16:33:26 -0500 Subject: [PATCH 221/229] build: Add CVP integration to MIN_NAUTOBOT_VERSION due to ExternalIntegration usage. 
--- nautobot_ssot/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nautobot_ssot/__init__.py b/nautobot_ssot/__init__.py index 12e735e8d..82f699b19 100644 --- a/nautobot_ssot/__init__.py +++ b/nautobot_ssot/__init__.py @@ -25,6 +25,7 @@ ] _MIN_NAUTOBOT_VERSION = { + "nautobot_ssot_aristacv": "2.2", "nautobot_ssot_infoblox": "2.1", } From 3e96a99af1ff9dea38450b761c9d9c46ebe56e12 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 16 Jul 2024 16:57:04 -0500 Subject: [PATCH 222/229] =?UTF-8?q?docs:=20=F0=9F=93=9D=20Update=20release?= =?UTF-8?q?=20notes=20and=20mkdocs=20reference?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- changes/234.fixed | 1 - changes/432.added | 5 --- changes/437.fixed | 1 - changes/442.added | 12 ------ changes/442.changed | 4 -- changes/442.documentation | 1 - changes/442.removed | 2 - changes/443.fixed | 1 - changes/450.documentation | 1 - changes/456.fixed | 1 - changes/463.fixed | 1 - changes/469.added | 1 - changes/472.documentation | 1 - changes/479.fixed | 2 - docs/admin/release_notes/version_2.7.md | 50 +++++++++++++++++++++++++ mkdocs.yml | 1 + 16 files changed, 51 insertions(+), 34 deletions(-) delete mode 100644 changes/234.fixed delete mode 100644 changes/432.added delete mode 100644 changes/437.fixed delete mode 100644 changes/442.added delete mode 100644 changes/442.changed delete mode 100644 changes/442.documentation delete mode 100644 changes/442.removed delete mode 100644 changes/443.fixed delete mode 100644 changes/450.documentation delete mode 100644 changes/456.fixed delete mode 100644 changes/463.fixed delete mode 100644 changes/469.added delete mode 100644 changes/472.documentation delete mode 100644 changes/479.fixed create mode 100644 docs/admin/release_notes/version_2.7.md diff --git a/changes/234.fixed b/changes/234.fixed deleted file mode 100644 index 0fce00f92..000000000 --- a/changes/234.fixed +++ /dev/null @@ -1 +0,0 @@ -Fixed integration tests so they're no longer dependent upon being enabled in dev environment. \ No newline at end of file diff --git a/changes/432.added b/changes/432.added deleted file mode 100644 index ab8967c16..000000000 --- a/changes/432.added +++ /dev/null @@ -1,5 +0,0 @@ -Added an SSoT to sync Nautobot ==> Itential Automation Gateway. - -This integration allows users to sync Nautobot device inventory to Itential Automation Gateway(s) (IAG). -The current IAG inventory that is supported is its default Ansible inventory. -Netmiko, Nornir, HTTP requests inventories will be added at a later date. \ No newline at end of file diff --git a/changes/437.fixed b/changes/437.fixed deleted file mode 100644 index 7b754958f..000000000 --- a/changes/437.fixed +++ /dev/null @@ -1 +0,0 @@ -Fixed link from list view to filtered sync log view by changing filter query to `sync` from overview. \ No newline at end of file diff --git a/changes/442.added b/changes/442.added deleted file mode 100644 index 6d0a3aecc..000000000 --- a/changes/442.added +++ /dev/null @@ -1,12 +0,0 @@ -Added plugin configuration page collecting configurations for integrations. -Infoblox integration - added SSOTInfobloxConfig model used for providing Infoblox integration configuration. -Infoblox integration - added support for multiple configuration instances. -Infoblox integration - added support for Infoblox Network Views and Nautobot Namespaces. 
-Infoblox integration - added support for selecting a subset of Network and IP address objects loaded for synchronization. -Infoblox integration - added support for creating Infoblox IP Addresses as A and PTR records. -Infoblox integration - added support for creating Infoblox IP Addresses as Fixed Address records of type RESERVED and MAC_ADDRESS. -Infoblox integration - added support for excluding extensive attributes and custom fields when synchronizing objects. -Infoblox integration - added support for selectively enabling synchronization of IPv4 and IPv6 objects. -Infoblox integration - added support for specifying Infoblox DNS View where DNS records are created. -Infoblox integration - added support for specifying record types subject to deletion in Infoblox and Nautobot. -Infoblox integration - added methods to Infoblox handling fixed addresses, DNS A, Host and PTR records, network views, DNS views, and authoritative zones. diff --git a/changes/442.changed b/changes/442.changed deleted file mode 100644 index c531bc690..000000000 --- a/changes/442.changed +++ /dev/null @@ -1,4 +0,0 @@ -Infoblox integration - configuration settings are now defined in the instances of the SSOTInfobloxConfig model. -Infoblox integration - functionality provided by the `infoblox_import_subnets` settings has been replaced with the `infoblox_sync_filters` field in the SSOTInfobloxConfig instance. -Infoblox integration - updated Infoblox client methods to support Network View. -Infoblox integration - standardized `JSONDecoderError` handling in the Infoblox client. diff --git a/changes/442.documentation b/changes/442.documentation deleted file mode 100644 index bb1fd3a38..000000000 --- a/changes/442.documentation +++ /dev/null @@ -1 +0,0 @@ -Add missing attribution for Device42 integration to README. diff --git a/changes/442.removed b/changes/442.removed deleted file mode 100644 index 579e5a660..000000000 --- a/changes/442.removed +++ /dev/null @@ -1,2 +0,0 @@ -Infoblox integration - configuration settings defined in `nautobot_config.py` have been removed. -Infoblox integration - configuration settings defined in environmental variables have been removed. \ No newline at end of file diff --git a/changes/443.fixed b/changes/443.fixed deleted file mode 100644 index f904df814..000000000 --- a/changes/443.fixed +++ /dev/null @@ -1 +0,0 @@ -Fixed issue with loading duplicate IPAddresses from Infoblox. diff --git a/changes/450.documentation b/changes/450.documentation deleted file mode 100644 index bb1fd3a38..000000000 --- a/changes/450.documentation +++ /dev/null @@ -1 +0,0 @@ -Add missing attribution for Device42 integration to README. diff --git a/changes/456.fixed b/changes/456.fixed deleted file mode 100644 index 4f31ce0e1..000000000 --- a/changes/456.fixed +++ /dev/null @@ -1 +0,0 @@ -Fix Device42 integration unit test that was expecting wrong BIG-IP netmiko platform name. diff --git a/changes/463.fixed b/changes/463.fixed deleted file mode 100644 index 754e36d02..000000000 --- a/changes/463.fixed +++ /dev/null @@ -1 +0,0 @@ -Fixed call in CVP integration to pass `import_active` config setting to get_devices() function call. \ No newline at end of file diff --git a/changes/469.added b/changes/469.added deleted file mode 100644 index 3d94e1181..000000000 --- a/changes/469.added +++ /dev/null @@ -1 +0,0 @@ -Added more models for import in Example Jobs. 
\ No newline at end of file diff --git a/changes/472.documentation b/changes/472.documentation deleted file mode 100644 index 3e14c04d7..000000000 --- a/changes/472.documentation +++ /dev/null @@ -1 +0,0 @@ -Update ServiceNow documentation for Locations and FAQ error. diff --git a/changes/479.fixed b/changes/479.fixed deleted file mode 100644 index 518c7e61f..000000000 --- a/changes/479.fixed +++ /dev/null @@ -1,2 +0,0 @@ -Correct get_or_instantiate() to use self.device_type instead of "device_type" in ACI adapter. -Refactor load_interfaces() to have check for device_specs var being defined in case file isn't loaded. \ No newline at end of file diff --git a/docs/admin/release_notes/version_2.7.md b/docs/admin/release_notes/version_2.7.md new file mode 100644 index 000000000..238ce57e6 --- /dev/null +++ b/docs/admin/release_notes/version_2.7.md @@ -0,0 +1,50 @@ + +## [v2.7.0 (2024-07-16)](https://github.com/nautobot/nautobot-app-ssot/releases/tag/v2.7.0) + +### Added + +- [#432](https://github.com/nautobot/nautobot-app-ssot/issues/432) - Added an SSoT to sync Nautobot ==> Itential Automation Gateway. +- [#432](https://github.com/nautobot/nautobot-app-ssot/issues/432) - +- [#432](https://github.com/nautobot/nautobot-app-ssot/issues/432) - This integration allows users to sync Nautobot device inventory to Itential Automation Gateway(s) (IAG). +- [#432](https://github.com/nautobot/nautobot-app-ssot/issues/432) - The current IAG inventory that is supported is its default Ansible inventory. +- [#432](https://github.com/nautobot/nautobot-app-ssot/issues/432) - Netmiko, Nornir, HTTP requests inventories will be added at a later date. +- [#442](https://github.com/nautobot/nautobot-app-ssot/issues/442) - Added plugin configuration page collecting configurations for integrations. +- [#442](https://github.com/nautobot/nautobot-app-ssot/issues/442) - Infoblox integration - added SSOTInfobloxConfig model used for providing Infoblox integration configuration. +- [#442](https://github.com/nautobot/nautobot-app-ssot/issues/442) - Infoblox integration - added support for multiple configuration instances. +- [#442](https://github.com/nautobot/nautobot-app-ssot/issues/442) - Infoblox integration - added support for Infoblox Network Views and Nautobot Namespaces. +- [#442](https://github.com/nautobot/nautobot-app-ssot/issues/442) - Infoblox integration - added support for selecting a subset of Network and IP address objects loaded for synchronization. +- [#442](https://github.com/nautobot/nautobot-app-ssot/issues/442) - Infoblox integration - added support for creating Infoblox IP Addresses as A and PTR records. +- [#442](https://github.com/nautobot/nautobot-app-ssot/issues/442) - Infoblox integration - added support for creating Infoblox IP Addresses as Fixed Address records of type RESERVED and MAC_ADDRESS. +- [#442](https://github.com/nautobot/nautobot-app-ssot/issues/442) - Infoblox integration - added support for excluding extensive attributes and custom fields when synchronizing objects. +- [#442](https://github.com/nautobot/nautobot-app-ssot/issues/442) - Infoblox integration - added support for selectively enabling synchronization of IPv4 and IPv6 objects. +- [#442](https://github.com/nautobot/nautobot-app-ssot/issues/442) - Infoblox integration - added support for specifying Infoblox DNS View where DNS records are created. +- [#442](https://github.com/nautobot/nautobot-app-ssot/issues/442) - Infoblox integration - added support for specifying record types subject to deletion in Infoblox and Nautobot. 
+- [#442](https://github.com/nautobot/nautobot-app-ssot/issues/442) - Infoblox integration - added methods to Infoblox handling fixed addresses, DNS A, Host and PTR records, network views, DNS views, and authoritative zones. +- [#469](https://github.com/nautobot/nautobot-app-ssot/issues/469) - Added more models for import in Example Jobs. + +### Changed + +- [#442](https://github.com/nautobot/nautobot-app-ssot/issues/442) - Infoblox integration - configuration settings are now defined in the instances of the SSOTInfobloxConfig model. +- [#442](https://github.com/nautobot/nautobot-app-ssot/issues/442) - Infoblox integration - functionality provided by the `infoblox_import_subnets` settings has been replaced with the `infoblox_sync_filters` field in the SSOTInfobloxConfig instance. +- [#442](https://github.com/nautobot/nautobot-app-ssot/issues/442) - Infoblox integration - updated Infoblox client methods to support Network View. +- [#442](https://github.com/nautobot/nautobot-app-ssot/issues/442) - Infoblox integration - standardized `JSONDecoderError` handling in the Infoblox client. + +### Removed + +- [#442](https://github.com/nautobot/nautobot-app-ssot/issues/442) - Infoblox integration - configuration settings defined in `nautobot_config.py` have been removed. +- [#442](https://github.com/nautobot/nautobot-app-ssot/issues/442) - Infoblox integration - configuration settings defined in environmental variables have been removed. + +### Fixed + +- [#234](https://github.com/nautobot/nautobot-app-ssot/issues/234) - Fixed integration tests so they're no longer dependent upon being enabled in dev environment. +- [#437](https://github.com/nautobot/nautobot-app-ssot/issues/437) - Fixed link from list view to filtered sync log view by changing filter query to `sync` from overview. +- [#443](https://github.com/nautobot/nautobot-app-ssot/issues/443) - Fixed issue with loading duplicate IPAddresses from Infoblox. +- [#456](https://github.com/nautobot/nautobot-app-ssot/issues/456) - Fix Device42 integration unit test that was expecting wrong BIG-IP netmiko platform name. +- [#463](https://github.com/nautobot/nautobot-app-ssot/issues/463) - Fixed call in CVP integration to pass `import_active` config setting to get_devices() function call. +- [#479](https://github.com/nautobot/nautobot-app-ssot/issues/479) - Correct get_or_instantiate() to use self.device_type instead of "device_type" in ACI adapter. +- [#479](https://github.com/nautobot/nautobot-app-ssot/issues/479) - Refactor load_interfaces() to have check for device_specs var being defined in case file isn't loaded. + +### Documentation + +- [#442](https://github.com/nautobot/nautobot-app-ssot/issues/442), [#450](https://github.com/nautobot/nautobot-app-ssot/issues/450) - Add missing attribution for Device42 integration to README. +- [#472](https://github.com/nautobot/nautobot-app-ssot/issues/472) - Update ServiceNow documentation for Locations and FAQ error. 
diff --git a/mkdocs.yml b/mkdocs.yml index ded5b3db3..3081d12c6 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -132,6 +132,7 @@ nav: - Compatibility Matrix: "admin/compatibility_matrix.md" - Release Notes: - "admin/release_notes/index.md" + - v2.7: "admin/release_notes/version_2.7.md" - v2.6: "admin/release_notes/version_2.6.md" - v2.5: "admin/release_notes/version_2.5.md" - v2.4: "admin/release_notes/version_2.4.md" From 2d686ff3c0360a13df5520f3177ea0fe3c75d6b5 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 16 Jul 2024 16:58:34 -0500 Subject: [PATCH 223/229] =?UTF-8?q?docs:=20=F0=9F=93=9D=20Remove=20empty?= =?UTF-8?q?=20change=20from=20release=20note.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/admin/release_notes/version_2.7.md | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/admin/release_notes/version_2.7.md b/docs/admin/release_notes/version_2.7.md index 238ce57e6..eeb810bbb 100644 --- a/docs/admin/release_notes/version_2.7.md +++ b/docs/admin/release_notes/version_2.7.md @@ -4,7 +4,6 @@ ### Added - [#432](https://github.com/nautobot/nautobot-app-ssot/issues/432) - Added an SSoT to sync Nautobot ==> Itential Automation Gateway. -- [#432](https://github.com/nautobot/nautobot-app-ssot/issues/432) - - [#432](https://github.com/nautobot/nautobot-app-ssot/issues/432) - This integration allows users to sync Nautobot device inventory to Itential Automation Gateway(s) (IAG). - [#432](https://github.com/nautobot/nautobot-app-ssot/issues/432) - The current IAG inventory that is supported is its default Ansible inventory. - [#432](https://github.com/nautobot/nautobot-app-ssot/issues/432) - Netmiko, Nornir, HTTP requests inventories will be added at a later date. From d2324f10d10bf49fdc764f9cb8df9216bd51b61c Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Wed, 17 Jul 2024 08:12:40 -0500 Subject: [PATCH 224/229] =?UTF-8?q?docs:=20=F0=9F=93=9D=20Remove=20duplica?= =?UTF-8?q?te=20change=20log=20for=20442=20documentation.=20It's=20duplica?= =?UTF-8?q?ting=20450.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/admin/release_notes/version_2.7.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/admin/release_notes/version_2.7.md b/docs/admin/release_notes/version_2.7.md index eeb810bbb..aff9e964a 100644 --- a/docs/admin/release_notes/version_2.7.md +++ b/docs/admin/release_notes/version_2.7.md @@ -45,5 +45,5 @@ ### Documentation -- [#442](https://github.com/nautobot/nautobot-app-ssot/issues/442), [#450](https://github.com/nautobot/nautobot-app-ssot/issues/450) - Add missing attribution for Device42 integration to README. +- [#450](https://github.com/nautobot/nautobot-app-ssot/issues/450) - Add missing attribution for Device42 integration to README. - [#472](https://github.com/nautobot/nautobot-app-ssot/issues/472) - Update ServiceNow documentation for Locations and FAQ error. 
From 1d78b05a5ea9ecf75c404626ba10a0eb55596146 Mon Sep 17 00:00:00 2001
From: Justin Drew <2396364+jdrew82@users.noreply.github.com>
Date: Wed, 17 Jul 2024 08:13:21 -0500
Subject: =?UTF-8?q?docs:=20=F0=9F=93=9D=20Add=20changelog?=
 =?UTF-8?q?=20snippet=20for=20release?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 changes/484.housekeeping | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 changes/484.housekeeping

diff --git a/changes/484.housekeeping b/changes/484.housekeeping
new file mode 100644
index 000000000..881c35ec0
--- /dev/null
+++ b/changes/484.housekeeping
@@ -0,0 +1 @@
+Releasing 2.7.0
\ No newline at end of file

From 815f7ef598ab0a436116d8b85656d25a41ef83e3 Mon Sep 17 00:00:00 2001
From: Justin Drew <2396364+jdrew82@users.noreply.github.com>
Date: Wed, 17 Jul 2024 11:04:50 -0500
Subject: =?UTF-8?q?fix:=20=F0=9F=90=9B=20Correct=20minimum?=
 =?UTF-8?q?=20for=20CVP=20integration=20to=202.1.?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 nautobot_ssot/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nautobot_ssot/__init__.py b/nautobot_ssot/__init__.py
index 82f699b19..d0a261b75 100644
--- a/nautobot_ssot/__init__.py
+++ b/nautobot_ssot/__init__.py
@@ -25,7 +25,7 @@
 ]

 _MIN_NAUTOBOT_VERSION = {
-    "nautobot_ssot_aristacv": "2.2",
+    "nautobot_ssot_aristacv": "2.1",
     "nautobot_ssot_infoblox": "2.1",
 }

From 56b2ca5acbfd16823624a47e58fd709b90c7af84 Mon Sep 17 00:00:00 2001
From: Justin Drew <2396364+jdrew82@users.noreply.github.com>
Date: Wed, 17 Jul 2024 11:05:38 -0500
Subject: =?UTF-8?q?build:=20=F0=9F=8F=97=EF=B8=8F=20Disabl?=
 =?UTF-8?q?e=20Itential=20integration=20in=20environment=20vars.?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 development/development.env | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/development/development.env b/development/development.env
index 930f62db5..2cdf32b89 100644
--- a/development/development.env
+++ b/development/development.env
@@ -97,4 +97,4 @@ IPFABRIC_HOST="https://ipfabric.example.com"
 IPFABRIC_SSL_VERIFY="True"
 IPFABRIC_TIMEOUT=15

-NAUTOBOT_SSOT_ENABLE_ITENTIAL="True"
+NAUTOBOT_SSOT_ENABLE_ITENTIAL="False"

From 5af8129d9ec53fda9ed4f1110a27cfb80a607252 Mon Sep 17 00:00:00 2001
From: Justin Drew <2396364+jdrew82@users.noreply.github.com>
Date: Wed, 17 Jul 2024 11:08:34 -0500
Subject: =?UTF-8?q?test:=20=E2=9C=85=20Add=20override=5Fse?=
 =?UTF-8?q?ttings=20for=20Itential=20integration=20to=20have=20Job=20instal?=
 =?UTF-8?q?led=20for=20testing.?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 nautobot_ssot/tests/itential/test_jobs.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/nautobot_ssot/tests/itential/test_jobs.py b/nautobot_ssot/tests/itential/test_jobs.py
index 6d940fbeb..9ede55df1 100644
--- a/nautobot_ssot/tests/itential/test_jobs.py
+++ b/nautobot_ssot/tests/itential/test_jobs.py
@@ -1,5 +1,6 @@
 """Itential SSoT Jobs Test Cases."""

+from django.test import override_settings
 from nautobot.extras.models import Job, JobLogEntry
 from nautobot.apps.testing import run_job_for_testing

@@ -8,6 +9,13 @@
 from nautobot_ssot.integrations.itential.models import AutomationGatewayModel


+@override_settings(
+    PLUGINS_CONFIG={
+        "nautobot_ssot": {
+            "enable_itential": True,
+        }
+    }
+)
 class 
ItentialSSoTJobsTestCase(base.ItentialSSoTBaseTransactionTestCase): """Itential SSoT Jobs Test Cases.""" From 847ae19e81b248e8db087b4318c6542b7f709124 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Fri, 19 Jul 2024 09:10:05 -0500 Subject: [PATCH 229/229] =?UTF-8?q?build:=20=F0=9F=8F=97=EF=B8=8F=20Re-ena?= =?UTF-8?q?bling=20Itential=20integration=20for=20now=20so=20CI=20passes?= =?UTF-8?q?=20while=20we=20figure=20out=20how=20to=20activate=20an=20integ?= =?UTF-8?q?ration=20just=20for=20tests.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- development/development.env | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/development/development.env b/development/development.env index 2cdf32b89..930f62db5 100644 --- a/development/development.env +++ b/development/development.env @@ -97,4 +97,4 @@ IPFABRIC_HOST="https://ipfabric.example.com" IPFABRIC_SSL_VERIFY="True" IPFABRIC_TIMEOUT=15 -NAUTOBOT_SSOT_ENABLE_ITENTIAL="False" +NAUTOBOT_SSOT_ENABLE_ITENTIAL="True"