Core infrastructure of secrets-finder (#2)
arber-salihi authored Jun 26, 2024
1 parent 3cb0fff commit 63fe0b5
Showing 92 changed files with 8,302 additions and 0 deletions.
64 changes: 64 additions & 0 deletions .github/workflows/api.js
@@ -0,0 +1,64 @@
const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));



async function call_api_with_retry_logic(api_call, max_retries = 5, default_delay = 5, secondary_rate_limit_delay_base = 60, delay_function = delay) {
    for (let i = 0; i < max_retries; i++) {
        try {
            return await api_call();
        } catch (error) {
            console.error(`Attempt ${i + 1} failed: ${error.message}`);

            let wait_time;
            if (error.response && error.response.status && error.response.headers) {
                const status = error.response.status;
                const retry_after = error.response.headers["retry-after"];
                const rate_limit_remaining = error.response.headers["x-ratelimit-remaining"];
                const rate_limit_reset = error.response.headers["x-ratelimit-reset"];
                const max_waiting_time = 900;

                if (status === 403) {
                    if (rate_limit_remaining === "0") {
                        wait_time = rate_limit_reset - Math.floor(Date.now() / 1000);
                        if (wait_time > max_waiting_time) {
                            console.error(`Rate limit reset time is in ${wait_time} seconds. Operation aborted.`);
                            throw error;
                        } else {
                            console.error(`Rate limit exceeded. Retrying in ${wait_time} seconds...`);
                        }
                    } else if (retry_after && parseInt(retry_after) > max_waiting_time) {
                        console.error(`Retry after time is in ${retry_after} seconds. Operation aborted.`);
                        throw error;
                    } else if (error.response.data.message.includes("secondary rate limit")) {
                        wait_time = secondary_rate_limit_delay_base + Math.floor(0.5 * Math.random() * secondary_rate_limit_delay_base);
                        console.error(`Secondary rate limit exceeded. Retrying in ${wait_time} seconds...`);
                    } else {
                        wait_time = parseInt(retry_after);
                        console.error(`Rate limit exceeded. Retrying in ${wait_time} seconds...`);
                    }
                } else if (status >= 500) {
                    wait_time = default_delay;
                    console.error(`An internal error occurred on server. Retrying in ${wait_time} seconds...`);
                } else if (status >= 400) {
                    console.error(`Client error: ${status}. Operation aborted.`);
                    throw error;
                }
            } else {
                wait_time = default_delay;
                console.error(`Unknown error. Retrying in ${wait_time} seconds...`);
            }

            if (i === max_retries - 1) {
                throw error;
            }

            await delay_function(wait_time * 1000);
        }
    }
}



module.exports = call_api_with_retry_logic;
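
A minimal usage sketch (not part of this commit): the helper wraps any function that returns a Promise, such as an Octokit request, and retries it based on the rate-limit headers attached to the error. The `octokit` instance, owner, and repository names below are placeholder assumptions.

const call_api_with_retry_logic = require('./api.js');

// Hypothetical example: "octokit", "example-org" and "example-repo" are
// placeholder assumptions, not values used anywhere in this commit.
async function list_open_issues(octokit) {
    return call_api_with_retry_logic(() => octokit.issues.listForRepo({
        owner: 'example-org',
        repo: 'example-repo',
        state: 'open'
    }));
}
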
176 changes: 176 additions & 0 deletions .github/workflows/api.test.js
@@ -0,0 +1,176 @@
const call_api_with_retry_logic = require('./api');

jest.setTimeout(999999);

describe('call_api_with_retry_logic', () => {
    let api_call, delay_function;
    const max_retries = 3;
    const default_delay = 1;
    const secondary_rate_limit_delay_base = 60;
    const max_waiting_time = 900;

    beforeEach(() => {
        api_call = jest.fn();
        delay_function = jest.fn();
    });

    test('should return api_call result when successful', async () => {
        const expected = 'success';
        api_call.mockResolvedValueOnce(expected);

        const result = await call_api_with_retry_logic(api_call, max_retries, default_delay, secondary_rate_limit_delay_base, delay_function);

        expect(result).toEqual(expected);
    });

    test('should retry on error and eventually succeed', async () => {
        const expected = 'success';
        const error = new Error('failure');
        api_call.mockRejectedValueOnce(error).mockResolvedValueOnce(expected);

        const result = await call_api_with_retry_logic(api_call, max_retries, default_delay, secondary_rate_limit_delay_base, delay_function);

        expect(result).toEqual(expected);
        expect(api_call).toHaveBeenCalledTimes(2);
        expect(delay_function).toHaveBeenCalledTimes(1);
        expect(delay_function).toHaveBeenCalledWith(default_delay * 1000);
    });

    test('should throw error when all retries fail', async () => {
        const error = new Error('failure');
        api_call.mockRejectedValue(error);

        await expect(call_api_with_retry_logic(api_call, max_retries, default_delay, secondary_rate_limit_delay_base, delay_function)).rejects.toThrow(error);
        expect(delay_function).toHaveBeenCalledTimes(max_retries - 1);
        expect(delay_function).toHaveBeenCalledWith(default_delay * 1000);
    });

    test('should handle 403 status with rate limit exceeded', async () => {
        const error = new Error('failure');
        const seconds_to_wait_before_retry = 2;
        error.response = {
            status: 403,
            headers: {
                'retry-after': '5',
                'x-ratelimit-remaining': '0',
                'x-ratelimit-reset': String(Math.floor(Date.now() / 1000) + seconds_to_wait_before_retry)
            },
            data: {
                message: 'Rate limit exceeded'
            }
        };
        api_call.mockRejectedValueOnce(error).mockResolvedValueOnce('success');

        const result = await call_api_with_retry_logic(api_call, max_retries, default_delay, secondary_rate_limit_delay_base, delay_function);

        expect(result).toEqual('success');
        expect(delay_function).toHaveBeenCalledTimes(1);
        expect(delay_function).toHaveBeenCalledWith(seconds_to_wait_before_retry * 1000);
    });

    test('should handle 403 status with secondary rate limit exceeded', async () => {
        const error = new Error('failure');
        error.response = {
            status: 403,
            headers: {},
            data: {
                message: 'secondary rate limit exceeded'
            }
        };
        api_call.mockRejectedValueOnce(error).mockResolvedValueOnce('success');

        const result = await call_api_with_retry_logic(api_call, max_retries, default_delay, secondary_rate_limit_delay_base, delay_function);

        expect(result).toEqual('success');
        expect(delay_function).toHaveBeenCalledTimes(1);
        const [[wait_time]] = delay_function.mock.calls;
        expect(wait_time).toBeGreaterThanOrEqual(secondary_rate_limit_delay_base * 1000);
    });

    test('should handle 500 status', async () => {
        const error = new Error('failure');
        error.response = {
            status: 500,
            headers: {},
            data: {
                message: 'Server error'
            }
        };
        api_call.mockRejectedValueOnce(error).mockResolvedValueOnce('success');

        const result = await call_api_with_retry_logic(api_call, max_retries, default_delay, secondary_rate_limit_delay_base, delay_function);

        expect(result).toEqual('success');
        expect(delay_function).toHaveBeenCalledTimes(1);
        expect(delay_function).toHaveBeenCalledWith(default_delay * 1000);
    });

    test('should handle 400 status and abort operation', async () => {
        const error = new Error('failure');
        error.response = {
            status: 400,
            headers: {},
            data: {
                message: 'Bad request'
            }
        };
        api_call.mockRejectedValueOnce(error);

        await expect(call_api_with_retry_logic(api_call, max_retries, default_delay, secondary_rate_limit_delay_base, delay_function)).rejects.toThrow(error);
        expect(api_call).toHaveBeenCalledTimes(1);
        expect(delay_function).not.toHaveBeenCalled();
    });

    test('should handle 403 status with retry after exceeding max waiting time', async () => {
        const error = new Error('failure');
        error.response = {
            status: 403,
            headers: {
                'retry-after': `${max_waiting_time + 1}`,
            },
            data: {
                message: 'Rate limit exceeded'
            }
        };
        api_call.mockRejectedValueOnce(error);

        await expect(call_api_with_retry_logic(api_call, max_retries, default_delay, secondary_rate_limit_delay_base, delay_function)).rejects.toThrow(error);
        expect(api_call).toHaveBeenCalledTimes(1);
        expect(delay_function).not.toHaveBeenCalled();
    });

    test('should handle 403 status with rate limit reset time exceeding max waiting time', async () => {
        const error = new Error('failure');
        error.response = {
            status: 403,
            headers: {
                'x-ratelimit-remaining': '0',
                'x-ratelimit-reset': String(Math.floor(Date.now() / 1000) + max_waiting_time + 1)
            },
            data: {
                message: 'Rate limit exceeded'
            }
        };
        api_call.mockRejectedValueOnce(error);

        await expect(call_api_with_retry_logic(api_call, max_retries, default_delay, secondary_rate_limit_delay_base, delay_function)).rejects.toThrow(error);
        expect(api_call).toHaveBeenCalledTimes(1);
        expect(delay_function).not.toHaveBeenCalled();
    });

    test('should handle non-403 4xx status', async () => {
        const error = new Error('failure');
        error.response = {
            status: 400,
            headers: {},
            data: {
                message: 'Bad request'
            }
        };
        api_call.mockRejectedValueOnce(error);

        await expect(call_api_with_retry_logic(api_call, max_retries, default_delay, secondary_rate_limit_delay_base, delay_function)).rejects.toThrow(error);
        expect(api_call).toHaveBeenCalledTimes(1);
        expect(delay_function).not.toHaveBeenCalled();
    });
});
117 changes: 117 additions & 0 deletions .github/workflows/github-issues.js
@@ -0,0 +1,117 @@
const call_api_with_retry_logic = require('./api.js');

const MAX_WAITING_TIME = 900;

async function create_issues(github_context, findings_per_committer) {
    const max_retries = 5;
    const default_delay = 5;
    const secondary_rate_limit_delay_base = 60;

    for (const committer in findings_per_committer) {
        try {
            const issue = await create_issue_for_committer(github_context, committer, findings_per_committer[committer], max_retries, default_delay, secondary_rate_limit_delay_base);
            await add_label_to_issue(github_context, issue, max_retries, default_delay, secondary_rate_limit_delay_base);
            await assign_issue_to_committer(github_context, committer, issue, max_retries, default_delay, secondary_rate_limit_delay_base);
        } catch (error) {
            console.error(`Failed to process committer ${committer}: ${error.message}`);
            if (error.response) {
                const retry_after = error.response.headers['retry-after'];
                const rate_limit_remaining = error.response.headers['x-ratelimit-remaining'];
                const rate_limit_reset = error.response.headers['x-ratelimit-reset'];
                if (error.response.status === 403 && ((rate_limit_remaining === '0' && (rate_limit_reset - Math.floor(Date.now() / 1000)) > MAX_WAITING_TIME) || (retry_after && parseInt(retry_after) > MAX_WAITING_TIME))) {
                    throw error;
                }
            }
        }
    }
}

async function create_issue_for_committer(github_context, committer, commits, max_retries, default_delay, secondary_rate_limit_delay_base) {
    console.log(`Creating issue for committer: ${committer}`);

    const DEFAULT_VALUE = "N/A";
    const FINDINGS_PER_COMMIT = Object.entries(commits).map(([id, findings]) => {
        const LIST_OF_FINDINGS = findings.map((entry, index) => {
            const detector = entry.DetectorName || DEFAULT_VALUE;
            const { file, line } = entry.SourceMetadata?.Data?.Git ?? {};
            const author = committer ? `[${committer}](${github_context.server_url}/${committer})` : DEFAULT_VALUE;
            const file_link = file ? `[${file}](${github_context.server_url}/${github_context.scanned_repository_owner}/${github_context.scanned_repository_name}/blob/${id}/${ encodeURI(file) }?plain=1#L${ line })` : DEFAULT_VALUE;

            return `> **FINDING ${index + 1}**\n`
                + `> &nbsp;&nbsp; **Type**\n&nbsp;&nbsp; ${detector}\n`
                + `> &nbsp;&nbsp;\n`
                + `> &nbsp;&nbsp; **Author**\n&nbsp;&nbsp; ${author}\n`
                + `> &nbsp;&nbsp;\n`
                + `> &nbsp;&nbsp; **File**\n&nbsp;&nbsp; ${file_link}\n\n`;
        });

        return `**COMMIT ${id.substring(0, 7)}**\n` + LIST_OF_FINDINGS.join("");
    }).join('<br />\n\n');

    const number_of_commits_containing_secrets = Object.keys(commits).length;
    const total_number_of_secrets = Object.values(commits).map(findings => findings.length).reduce((a, b) => a + b, 0);

    const issue = `# ⚠️ WARNING: SECRET${total_number_of_secrets > 1 ? 'S' : ''} PUSHED TO MAIN BRANCH
### ${ total_number_of_secrets } secret${total_number_of_secrets > 1 ? 's have' : ' has'} been found in ${ number_of_commits_containing_secrets } commit${ number_of_commits_containing_secrets > 1 ? 's' : ''}.
<br/><br/>
## FINDINGS
${FINDINGS_PER_COMMIT}
Please note that the detector considers all secrets found in the commit${ number_of_commits_containing_secrets > 1 ? 's' : ''}, even if they have been pushed earlier.
<br/>
You can find more information in the workflow run that generated this report:\\
${github_context.server_url}/${github_context.secrets_finder_repository}/actions/runs/${github_context.run_id}
<br/><br/>
## REMEDIATION PROCEDURE
- You **MUST** rotate the credentials that were stored in plain text. Assume they have already been compromised.
- You **MUST** move the new credentials to an approved secrets management service and pattern.
- You **SHOULD** clear the plaintext secrets from Git history.
<br/><br/>
To clean up your Git history, you can use the following guidance:
- [Removing Sensitive Data - GitHub](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/removing-sensitive-data-from-a-repository)
<br/><br/>
You can also find more information about how to rotate your secrets here:
https://howtorotate.com/docs/introduction/getting-started/`;

    const { data } = await call_api_with_retry_logic(() => github_context.api.issues.create({
        owner: github_context.scanned_repository_owner,
        repo: github_context.scanned_repository_name,
        title: `⚠️ Secret${total_number_of_secrets > 1 ? 's' : ''} pushed to main branch (${ number_of_commits_containing_secrets } commit${ number_of_commits_containing_secrets > 1 ? 's' : ''} affected)`,
        body: issue
    }), max_retries, default_delay, secondary_rate_limit_delay_base);

    console.log(`New issue created in repository: ${ data.html_url }`);
    return data.number;
}

async function add_label_to_issue(github_context, issue, max_retries, default_delay, secondary_rate_limit_delay_base) {
    console.log(`Adding label to issue: ${issue}`);
    await call_api_with_retry_logic(() => github_context.api.issues.addLabels({
        owner: github_context.scanned_repository_owner,
        repo: github_context.scanned_repository_name,
        issue_number: issue,
        labels: ['leaked-secrets']
    }), max_retries, default_delay, secondary_rate_limit_delay_base);
}

async function assign_issue_to_committer(github_context, committer, issue, max_retries, default_delay, secondary_rate_limit_delay_base) {
    console.log(`Assigning issue to committer: ${committer}`);
    await call_api_with_retry_logic(() => github_context.api.issues.addAssignees({
        owner: github_context.scanned_repository_owner,
        repo: github_context.scanned_repository_name,
        issue_number: issue,
        assignees: [committer]
    }), max_retries, default_delay, secondary_rate_limit_delay_base);
}

module.exports = create_issues;
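
The shapes of `github_context` and `findings_per_committer` are implied by the code above; the sketch below is illustrative only, and every value in it is a placeholder assumption. `octokit` stands for an authenticated Octokit-style client constructed elsewhere (for example by actions/github-script).

const create_issues = require('./github-issues.js');

// Hypothetical caller: all repository names, the run id, the committer login,
// the commit SHA and the finding below are placeholder assumptions.
async function report(octokit) {
    const github_context = {
        api: octokit,
        server_url: 'https://github.com',
        scanned_repository_owner: 'example-org',
        scanned_repository_name: 'example-repo',
        secrets_finder_repository: 'example-org/secrets-finder',
        run_id: '123456789'
    };

    const findings_per_committer = {
        'some-committer': {                                      // keyed by committer login
            'abc1234def5678abc1234def5678abc1234def56': [        // keyed by commit SHA
                {
                    DetectorName: 'AWS',
                    SourceMetadata: { Data: { Git: { file: 'config/settings.py', line: 42 } } }
                }
            ]
        }
    };

    await create_issues(github_context, findings_per_committer);
}
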
13 changes: 13 additions & 0 deletions .github/workflows/package.json
@@ -0,0 +1,13 @@
{
"name": "github_api_client",
"version": "1.0.0",
"license": "MIT",
"main": "api.js",
"scripts": {
"test": "jest"
},
"author": "Thomson Reuters",
"devDependencies": {
"jest": "29.7.0"
}
}