Use Counter to find duplicate peer relationships

Gives a bit more readability.
This commit is contained in:
Jeff Bradberry
2024-01-25 10:15:16 -05:00
committed by Seth Foster
parent cc4cc37d46
commit 957ce59bf7

View File

@@ -6,7 +6,7 @@ import copy
 import json
 import logging
 import re
-from collections import OrderedDict
+from collections import Counter, OrderedDict
 from datetime import timedelta
 from uuid import uuid4
@@ -5697,10 +5697,9 @@ class InstanceSerializer(BaseSerializer):
         if set(p.instance.peers.all()) & instance_addresses:
             raise serializers.ValidationError(_(f"Instance {p.instance.hostname} is already peered to this instance."))
-        # cannot peer to instance more than once
-        # compare length of set to original list to check for duplicates
-        peers_instances = [p.instance for p in attrs.get('peers', [])]
-        if len(set(peers_instances)) != len(peers_instances):
+        # cannot peer to an instance more than once
+        peers_instances = Counter(p.instance_id for p in attrs.get('peers', []))
+        if any(count > 1 for count in peers_instances.values()):
             raise serializers.ValidationError(_("Cannot peer to the same instance more than once."))
         # cannot enable peers_from_control_nodes if listener_port is not set