feat(ci): validation
wdhdev committed Nov 9, 2024
1 parent 2b5e7af commit 94d1338
Showing 9 changed files with 538 additions and 30 deletions.
6 changes: 1 addition & 5 deletions .github/CODEOWNERS
@@ -1,8 +1,4 @@
-* @phenax @wdhdev
+* @wdhdev
 
 /.github/ @wdhdev
 /domains/ @is-a-dev/maintainers
-
-*.md @is-a-dev/maintainers
-/LICENSE @phenax
-/dnsconfig.js @wdhdev
@@ -7,7 +7,9 @@ on:
     branches: [main]
     paths:
      - "domains/*"
-     - ".github/workflows/validation.yml"
+     - "tests/*"
+     - "utils/*"
+     - ".github/workflows/validate.yml"
      - "dnsconfig.js"
 
  workflow_dispatch:
@@ -28,15 +30,12 @@ jobs:
        with:
          args: check
 
-  json:
-    name: JSON
+  tests:
+    name: Tests
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
 
-      - name: JSON Syntax Check
-        uses: limitusus/json-syntax-check@v2
-        with:
-          pattern: "\\.json$"
-        env:
-          BASE: "domains/"
+      - run: npm install
+
+      - run: npm test
22 changes: 12 additions & 10 deletions dnsconfig.js
@@ -75,15 +75,18 @@ for (var subdomain in domains) {
 
    // Handle DS records
    if (domainData.record.DS) {
-        records.push(
-            DS(
-                subdomainName,
-                domainData.record.DS.key_tag,
-                domainData.record.DS.algorithm,
-                domainData.record.DS.digest_type,
-                domainData.record.DS.digest
-            )
-        );
+        for (var ds in domainData.record.DS) {
+            var dsRecord = domainData.record.DS[ds];
+            records.push(
+                DS(
+                    subdomainName,
+                    dsRecord.key_tag,
+                    dsRecord.algorithm,
+                    dsRecord.digest_type,
+                    dsRecord.digest
+                )
+            );
+        }
    }
 
    // Handle MX records
@@ -151,7 +154,6 @@ var options = {
 
 var ignored = [
    IGNORE("@", "MX,TXT"),
-    IGNORE("\\*"),
    IGNORE("_acme-challenge", "TXT"),
    IGNORE("_autodiscover._tcp", "SRV"),
    IGNORE("_dmarc", "TXT"),
14 changes: 8 additions & 6 deletions domains/sandbox.json
@@ -9,11 +9,13 @@
            "blue.foundationdns.net",
            "blue.foundationdns.org"
        ],
-        "DS": {
-            "key_tag": 2371,
-            "algorithm": 13,
-            "digest_type": 2,
-            "digest": "023DD50C657C5F2471728B76127008F244CFB45F32AA0CE1978C0182D363EF12"
-        }
+        "DS": [
+            {
+                "key_tag": 2371,
+                "algorithm": 13,
+                "digest_type": 2,
+                "digest": "023DD50C657C5F2471728B76127008F244CFB45F32AA0CE1978C0182D363EF12"
+            }
+        ]
    }
 }
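For context, each entry in the DS array above becomes one DS() call through the new loop in dnsconfig.js. Below is a minimal standalone sketch of that iteration, not part of this commit, using the sandbox.json data with console.log standing in for DNSControl's DS() helper and "sandbox" as the subdomain name:

// Minimal sketch (not part of this commit): iterate a DS array shaped like
// domains/sandbox.json above; console.log stands in for DNSControl's DS() helper.
var record = {
    DS: [
        {
            key_tag: 2371,
            algorithm: 13,
            digest_type: 2,
            digest: "023DD50C657C5F2471728B76127008F244CFB45F32AA0CE1978C0182D363EF12"
        }
    ]
};

for (var ds in record.DS) {
    var dsRecord = record.DS[ds];
    console.log("DS", "sandbox", dsRecord.key_tag, dsRecord.algorithm, dsRecord.digest_type, dsRecord.digest);
}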
9 changes: 9 additions & 0 deletions package.json
@@ -0,0 +1,9 @@
{
    "devDependencies": {
        "ava": "^6.2.0",
        "fs-extra": "^11.2.0"
    },
    "scripts": {
        "test": "npx ava tests/*.test.js"
    }
}
40 changes: 40 additions & 0 deletions tests/domains.test.js
@@ -0,0 +1,40 @@
const t = require("ava");
const fs = require("fs-extra");
const path = require("path");

const domainsPath = path.resolve("domains");
const files = fs.readdirSync(domainsPath);

// Nested subdomains should not exist if the parent subdomain does not exist
t("Nested subdomains should not exist without a parent subdomain", (t) => {
    files.forEach((file) => {
        const subdomain = file.replace(".json", "");

        if (subdomain.split(".").length > 1) {
            const parentSubdomain = subdomain.split(".").pop();

            t.true(
                files.includes(`${parentSubdomain}.json`),
                `${file}: Parent subdomain does not exist`
            );
        }
    });

    t.pass();
});

// Nested subdomains should not exist if the parent subdomain has NS records
t("Nested subdomains should not exist if the parent subdomain has NS records", (t) => {
    files.forEach((file) => {
        const subdomain = file.replace(".json", "");

        if (subdomain.split(".").length > 1) {
            const parentSubdomain = subdomain.split(".").pop();
            const parentDomain = fs.readJsonSync(path.join(domainsPath, `${parentSubdomain}.json`));

            t.is(parentDomain.record.NS, undefined, `${file}: Parent subdomain has NS records`);
        }
    });

    t.pass();
});
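To illustrate the parent lookup in the tests above: for a nested file such as foo.blog.json (a hypothetical name), split(".").pop() on the subdomain yields "blog", so blog.json must exist and must not define NS records. A small standalone sketch, not part of the commit:

// Standalone illustration with hypothetical file names; mirrors how the tests
// above derive the parent file a nested subdomain depends on.
const files = ["blog.json", "foo.blog.json"];

files.forEach((file) => {
    const subdomain = file.replace(".json", "");

    if (subdomain.split(".").length > 1) {
        const parentSubdomain = subdomain.split(".").pop();

        // Prints: foo.blog.json -> requires blog.json
        console.log(`${file} -> requires ${parentSubdomain}.json`);
    }
});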
107 changes: 107 additions & 0 deletions tests/json.test.js
@@ -0,0 +1,107 @@
const t = require("ava");
const fs = require("fs-extra");
const path = require("path");

const requiredFields = {
    owner: "object",
    record: "object",
};

const optionalFields = {
    proxied: "boolean",
    reserved: "boolean",
};

const requiredOwnerFields = {
    username: "string",
};

const optionalOwnerFields = {
    email: "string",
};

const emailRegex = /^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$/;
const hostnameRegex =
    /^(?=.{1,253}$)(?:(?:[_a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)\.)+[a-zA-Z]{2,63}$/;

const domainsPath = path.resolve("domains");
const files = fs.readdirSync(domainsPath);

const validateRequiredFields = (t, obj, requiredFields, file) => {
    Object.keys(requiredFields).forEach((key) => {
        t.true(obj.hasOwnProperty(key), `${file}: Missing required field: ${key}`);
        t.is(
            typeof obj[key],
            requiredFields[key],
            `${file}: Field ${key} should be of type ${requiredFields[key]}`
        );
    });
};

const validateOptionalFields = (t, obj, optionalFields, file) => {
    Object.keys(optionalFields).forEach((key) => {
        if (obj.hasOwnProperty(key)) {
            t.is(
                typeof obj[key],
                optionalFields[key],
                `${file}: Field ${key} should be of type ${optionalFields[key]}`
            );
        }
    });
};

// Ensure all files are valid JSON
t("All files should be valid JSON", (t) => {
    files.forEach((file) => {
        t.notThrows(() => fs.readJsonSync(path.join(domainsPath, file)), `${file}: Invalid JSON file`);
    });
});

// Ensure all files have valid file names
t("All files should have valid file names", (t) => {
    files.forEach((file) => {
        t.true(file.endsWith(".json"), `${file}: File does not have .json extension`);
        t.false(file.includes(".is-a.dev"), `${file}: File name should not contain .is-a.dev`);

        // Ignore root domain
        if (file !== "@.json") {
            t.regex(
                file.replace(/\.json$/, "") + ".is-a.dev",
                hostnameRegex,
                `${file}: FQDN must be 1-253 characters, use letters, numbers, dots, or hyphens, and not start or end with a hyphen.`
            );
        }
    });
});

// Ensure all files have the required fields
t("All files should have the required fields", (t) => {
    files.forEach((file) => {
        const data = fs.readJsonSync(path.join(domainsPath, file));

        validateRequiredFields(t, data, requiredFields, file);
        validateRequiredFields(t, data.owner, requiredOwnerFields, file);

        if (!data.reserved) {
            t.true(Object.keys(data.record).length > 0, `${file}: No record types found`);
        }
    });
});

// Validate the optional fields
t("All files should have valid optional fields", (t) => {
    files.forEach((file) => {
        const data = fs.readJsonSync(path.join(domainsPath, file));

        validateOptionalFields(t, data, optionalFields, file);
        validateOptionalFields(t, data.owner, optionalOwnerFields, file);

        if (data.owner.email) {
            t.regex(
                data.owner.email,
                emailRegex,
                `${file}: Owner email should be a valid email address`
            );
        }
    });
});
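For reference, a hypothetical domains/ file that these checks would accept (the username, email, and record values here are invented for illustration): owner and record are required objects, owner.username is a required string, owner.email is optional but must match emailRegex, and a non-reserved domain needs at least one record type.

{
    "owner": {
        "username": "example-user",
        "email": "user@example.com"
    },
    "record": {
        "CNAME": "example-user.github.io"
    }
}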