Commit

chore: improve duplicate address detection (#249)
jvoss authored Dec 30, 2024
1 parent 2269655 commit a222240
Showing 1 changed file with 11 additions and 14 deletions.
25 changes: 11 additions & 14 deletions validate_config.py
@@ -41,18 +41,9 @@ def main():
 
         node_type = node_types[router]
 
-        # collect and ensure peer addrs are unique per router
-        peer_ipv4_addrs = [peer["ipv4"] for peer in peers if "ipv4" in peer]
-        peer_ipv6_addrs = [peer["ipv6"] for peer in peers if "ipv6" in peer]
-
         for peer in peers:
             peer_errors = list(validate(node_type, peer))
-
-            if not validate_unique_peers(peer_ipv4_addrs):
-                peer_errors.append("ipv4 address must be unique per router")
-
-            if not validate_unique_peers(peer_ipv6_addrs):
-                peer_errors.append("ipv6 address must be unique per router")
+            peer_errors += validate_unique_peers(peer, peers)
 
             for e in peer_errors:
                 post_annotation(e, filename, peer["__line__"])
@@ -149,12 +140,18 @@ def validate(node_type, peer):
     return filter(None, errors)
 
 
-def validate_unique_peers(peer_ip_addrs):
-    if len(set(peer_ip_addrs)) < len(peer_ip_addrs):
-        return False
+def validate_unique_peers(this_peer, peers):
+    errors = []
+
+    for p in peers:
+        if this_peer["name"] == p["name"]: continue
 
-    return True
+        for af in ['ipv4', 'ipv6']:
+            if af in this_peer and af in p:
+                if this_peer[af] == p[af]:
+                    errors.append(f"{af} address ({this_peer[af]}) must be unique per router: conflict with {p['name']}")
 
+    return filter(None, errors)
 
 def validate_asn(number):
     # Build ASN cache
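For context, here is a minimal, self-contained sketch of the reworked check. The validate_unique_peers body is copied from the diff above; the peer entries (peer-a, peer-b), their addresses, and the print loop standing in for the repository's post_annotation call are made up for illustration.

# Sketch of the new duplicate detection. The function body matches the diff above;
# the peer data and the print loop are illustrative only.

def validate_unique_peers(this_peer, peers):
    errors = []

    for p in peers:
        if this_peer["name"] == p["name"]: continue

        for af in ['ipv4', 'ipv6']:
            if af in this_peer and af in p:
                if this_peer[af] == p[af]:
                    errors.append(f"{af} address ({this_peer[af]}) must be unique per router: conflict with {p['name']}")

    return filter(None, errors)


# Hypothetical peers on one router, sharing an IPv4 address.
peers = [
    {"name": "peer-a", "ipv4": "192.0.2.1", "ipv6": "2001:db8::1"},
    {"name": "peer-b", "ipv4": "192.0.2.1"},
]

for peer in peers:
    peer_errors = []
    peer_errors += validate_unique_peers(peer, peers)  # += drains the filter object like list.extend
    for e in peer_errors:
        print(f"{peer['name']}: {e}")

# peer-a: ipv4 address (192.0.2.1) must be unique per router: conflict with peer-b
# peer-b: ipv4 address (192.0.2.1) must be unique per router: conflict with peer-a

Unlike the old set-based check, which could only say that some address in the router's peer list was duplicated, the per-peer comparison reports the duplicate address and the name of the conflicting peer, so the annotation posted on each peer's line points at the other side of the conflict.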
