Commit

Merge branch 'dev'

jjranalli committed Apr 16, 2023
2 parents e5d6b04 + cde8059 commit 6ebf83c
Showing 13 changed files with 284 additions and 130 deletions.
9 changes: 9 additions & 0 deletions app/github/route.ts
@@ -1,4 +1,5 @@
import { handleGithubAuth } from "@lib/handleGithubAuth"
import { replyIssueComment } from "@lib/replyIssueComment"
import { summarizePullRequest } from "@lib/summarizePullRequest"
import { NextRequest, NextResponse } from "next/server"

@@ -9,9 +10,17 @@ export async function POST(req: NextRequest) {

try {
if (payload.action == "opened" || payload.action == "synchronize") {
// If a PR is opened or updated, summarize it
const octokit = await handleGithubAuth(payload)

await summarizePullRequest(payload, octokit)
} else if (payload.action == "created") {
if (payload.comment.body.includes("/ask-codex")) {
// If a comment is created, reply to it
const octokit = await handleGithubAuth(payload)

await replyIssueComment(payload, octokit)
}
}

return NextResponse.json("ok")
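
For readers unfamiliar with GitHub webhook payloads, here is a rough sketch of the fields the route above and the handlers below rely on. This is a trimmed-down shape inferred from the code, not part of this commit; real payloads carry many more fields.

// Hypothetical, trimmed-down payload shapes (illustration only).
type IssueCommentPayload = {
  action: "created"
  comment: { body: string }                               // checked for the "/ask-codex" trigger
  repository: { name: string; owner: { login: string } }
  issue: { number: number }
  sender: { login: string }
}

type PullRequestPayload = {
  action: "opened" | "synchronize"
  pull_request: {
    number: number
    body: string | null
    base: { sha: string; repo: { name: string; owner: { login: string } } }
    head: { sha: string }
  }
}
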
12 changes: 7 additions & 5 deletions lib/joinStringsUntilMaxLength.ts
@@ -1,22 +1,24 @@
export function joinStringsUntilMaxLength(
parsedFiles: string[],
maxLength: number
): string {
let combinedString = ""
) {
let codeDiff = ""
let currentLength = 0
let maxLengthExceeded = false

for (const file of parsedFiles) {
const fileLength = file.length

if (currentLength + fileLength <= maxLength) {
combinedString += file
codeDiff += file
currentLength += fileLength
} else {
maxLengthExceeded = true
const remainingLength = maxLength - currentLength
combinedString += file.slice(0, remainingLength)
codeDiff += file.slice(0, remainingLength)
break
}
}

return combinedString
return { codeDiff, maxLengthExceeded }
}
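
The change above turns the helper's plain-string return value into an object carrying the joined diff plus a truncation flag. A quick usage sketch of the new behavior (inputs are made up for illustration; the import path assumes the repository root):

import { joinStringsUntilMaxLength } from "./lib/joinStringsUntilMaxLength"

// Made-up parsed files, 6 characters each.
const parsedFiles = ["aaaaaa", "bbbbbb", "cccccc"]

// Everything fits: the strings are concatenated and the flag stays false.
const ok = joinStringsUntilMaxLength(parsedFiles, 100)
// -> { codeDiff: "aaaaaabbbbbbcccccc", maxLengthExceeded: false }

// The limit is hit mid-file: the second file is truncated and the flag is set.
const truncated = joinStringsUntilMaxLength(parsedFiles, 10)
// -> { codeDiff: "aaaaaabbbb", maxLengthExceeded: true }
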
55 changes: 55 additions & 0 deletions lib/replyIssueComment.ts
@@ -0,0 +1,55 @@
import { Octokit } from "@octokit/rest"
import { ChatCompletionRequestMessage } from "openai-streams"
import { generateChatGpt } from "../utils/generateChatGpt"
import { getCodeDiff } from "../utils/getCodeDiff"

export const startDescription = "\n\n<!-- start pr-codex -->"
export const endDescription = "<!-- end pr-codex -->"
const systemPrompt =
"You are a Git diff assistant. Given a code diff, you answer any question related to it. Be concise. Always wrap file names, functions, objects and similar in backticks (`)."

export async function replyIssueComment(payload: any, octokit: Octokit) {
// Get relevant PR information
const { repository, issue, sender, comment } = payload

const question = comment.body.split("/ask-codex")[1].trim()

if (question) {
const { owner, repo, issue_number } = {
owner: repository.owner.login,
repo: repository.name,
issue_number: issue.number
}

// Get the diff content using Octokit and GitHub API
const { codeDiff } = await getCodeDiff(owner, repo, issue_number, octokit)

// If there are changes, trigger workflow
if (codeDiff?.length != 0) {
const messages: ChatCompletionRequestMessage[] = [
{
role: "system",
content: `${systemPrompt}\n\nHere is the code diff:\n\n${codeDiff}`
},
{
role: "user",
content: `${question}`
}
]

const codexResponse = await generateChatGpt(messages)

const description = `> ${question}\n\n@${sender.login} ${codexResponse}`

await octokit.issues.createComment({
owner,
repo,
issue_number,
body: description
})

return codexResponse
}
throw new Error("No changes in PR")
}
}
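
As a rough illustration of the trigger parsing above: the question is whatever follows `/ask-codex` in the comment body, and an empty remainder means no reply is generated. A small sketch using the same split-and-trim logic:

// Same "/ask-codex" extraction as above, shown standalone for illustration.
const extractQuestion = (body: string) => body.split("/ask-codex")[1]?.trim()

extractQuestion("/ask-codex What does `getCodeDiff` return?")
// -> "What does `getCodeDiff` return?"

extractQuestion("Thanks! /ask-codex why was the return type changed?")
// -> "why was the return type changed?"

extractQuestion("/ask-codex")
// -> "" (falsy, so the handler exits without commenting)
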
133 changes: 38 additions & 95 deletions lib/summarizePullRequest.ts
@@ -1,138 +1,81 @@
import { Octokit } from "@octokit/rest"
import { ChatCompletionRequestMessage, OpenAI } from "openai-streams"
import { yieldStream } from "yield-stream"
import { parseDiff } from "../utils/parseDiff"
import { joinStringsUntilMaxLength } from "./joinStringsUntilMaxLength"
import { ChatCompletionRequestMessage } from "openai-streams"
import { generateChatGpt } from "../utils/generateChatGpt"
import { getCodeDiff } from "../utils/getCodeDiff"

export const startDescription = "<!-- start pr-codex -->"
export const startDescription = "\n\n<!-- start pr-codex -->"
export const endDescription = "<!-- end pr-codex -->"
const systemPrompt =
"You are a Git diff assistant. Given a code diff, you provide a clear and concise description of its content. Always wrap file names, functions, objects and similar in backticks (`)."

export async function summarizePullRequest(payload: any, octokit: Octokit) {
// Get relevant PR information
const pr = payload.pull_request
const { owner, repo, number } = {
const { owner, repo, pull_number } = {
owner: pr.base.repo.owner.login,
repo: pr.base.repo.name,
number: pr.number
pull_number: pr.number
}

// Get the diff content using Octokit and GitHub API
const compareResponse = await octokit.rest.repos.compareCommits({
const { codeDiff, skippedFiles, maxLengthExceeded } = await getCodeDiff(
owner,
repo,
base: pr.base.sha,
head: pr.head.sha,
mediaType: {
format: "diff"
}
})
const diffContent = String(compareResponse.data)

// Parses the diff content and returns the parsed files.
// If the number of changes in a file is greater than 1k changes, the file will be skipped.
// The codeDiff is the joined string of parsed files, up to a max length of 10k.
const maxChanges = 1000
const { parsedFiles, skippedFiles } = parseDiff(diffContent, maxChanges)
const codeDiff = joinStringsUntilMaxLength(parsedFiles, 10000)
pull_number,
octokit
)

// If there are changes, trigger workflow
if (codeDiff.length != 0) {
const systemPrompt = `You are a Git diff assistant. Always begin with "This PR". Given a code diff, you provide a simple description in prose, in less than 300 chars, which sums up the changes. Continue with "\n\n### Detailed summary\n" and make a comprehensive list of all changes, excluding any eventual skipped files. Be concise. Always wrap file names, functions, objects and similar in backticks (\`).${
skippedFiles.length != 0
? ` After the list, conclude with "\n\n> " and mention that the following files were skipped due to too many changes: ${skippedFiles.join(
","
)}.`
: ""
}`

if (codeDiff?.length != 0) {
const messages: ChatCompletionRequestMessage[] = [
{
role: "system",
content: systemPrompt
content: `${systemPrompt}\n\nHere is the code diff:\n\n${codeDiff}`
},
{
role: "user",
content: `Here is the code diff:\n\n${codeDiff}`
content:
'Starting with "This PR", clearly explain the focus of this PR in prose, in less than 300 characters. Then follow up with "\n\n### Detailed summary\n" and make a comprehensive list of all changes.'
}
]

const summary = await generateChatGpt(messages)
const codexResponse = await generateChatGpt(messages)

// Check if the PR already has a comment from the bot
const hasCodexCommented =
payload.action == "synchronize" &&
pr.body?.split("\n\n" + startDescription).length > 1

// if (firstComment) {
// // Edit pinned bot comment to the PR
// await octokit.issues.updateComment({
// owner,
// repo,
// comment_id: firstComment.id,
// body: summary
// })
// } else {
// // Add a comment to the PR
// await octokit.issues.createComment({
// owner,
// repo,
// issue_number: number,
// body: summary
// })
// }
pr.body?.split(startDescription).length > 1

const prCodexText = `\n\n${startDescription}\n\n---\n\n## PR-Codex overview\n${summary}\n\n${endDescription}`
const prCodexText = `${startDescription}\n\n${
(hasCodexCommented ? pr.body.split(startDescription)[0].trim() : pr.body)
? "---\n\n"
: ""
}## PR-Codex overview\n${codexResponse}${
skippedFiles.length != 0
? `\n\n> The following files were skipped due to too many changes: ${skippedFiles.join(
", "
)}`
: ""
}${
maxLengthExceeded
? "\n\n> The code diff exceeds the max number of characters, so this overview may be incomplete."
: ""
}\n\n${endDescription}`

const description = hasCodexCommented
? pr.body.split("\n\n" + startDescription)[0] +
? pr.body.split(startDescription)[0] +
prCodexText +
pr.body.split(endDescription)[1]
: pr.body + prCodexText
: (pr.body ?? "") + prCodexText

await octokit.issues.update({
owner,
repo,
issue_number: number,
issue_number: pull_number,
body: description
})

return summary
}
}

const generateChatGpt = async (messages: ChatCompletionRequestMessage[]) => {
const DECODER = new TextDecoder()
let text = ""

try {
const stream = await OpenAI(
"chat",
{
model: "gpt-3.5-turbo",
temperature: 0.7,
messages
},
{ apiKey: process.env.OPENAI_API_KEY }
)

for await (const chunk of yieldStream(stream)) {
try {
const decoded: string = DECODER.decode(chunk)

if (decoded === undefined)
throw new Error(
"No choices in response. Decoded response: " +
JSON.stringify(decoded)
)

text += decoded
} catch (err) {
console.error(err)
}
}
} catch (err) {
console.error(err)
return codexResponse
}

return text
throw new Error("No changes in PR")
}
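
The description handling above splices the generated overview between the startDescription and endDescription HTML-comment markers, so user-written text outside the markers survives re-summarization. A sketch of that splicing on a "synchronize" event (the body text is made up):

// Hypothetical existing PR body containing a previous PR-Codex section.
const startDescription = "\n\n<!-- start pr-codex -->"
const endDescription = "<!-- end pr-codex -->"

const existingBody =
  "Adds the /ask-codex command." +
  startDescription +
  "\n\n---\n\n## PR-Codex overview\n(old summary)\n\n" +
  endDescription +
  "\nReviewer notes."

// Only the section between the markers is replaced; text before the start
// marker and after the end marker is preserved.
const userText = existingBody.split(startDescription)[0]   // "Adds the /ask-codex command."
const trailing = existingBody.split(endDescription)[1]     // "\nReviewer notes."

const newSection = `${startDescription}\n\n---\n\n## PR-Codex overview\n(new summary)\n\n${endDescription}`
const updatedBody = userText + newSection + trailing
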
3 changes: 2 additions & 1 deletion package.json
@@ -9,7 +9,8 @@
"start": "next start",
"lint": "next lint",
"prettier": "prettier --write . --ignore-path .gitignore",
"summarize": "npx ts-node scripts/summarize"
"summarize": "npx ts-node scripts/summarize",
"reply": "npx ts-node scripts/reply"
},
"dependencies": {
"@headlessui/react": "^1.7.14",
33 changes: 33 additions & 0 deletions scripts/reply.ts
@@ -0,0 +1,33 @@
import dotenv from "dotenv"
import { handleGithubAuth } from "../lib/handleGithubAuth"
import { replyIssueComment } from "../lib/replyIssueComment"
import { testPayloadComment } from "../utils/github/testPayloadComment"

dotenv.config()

// Customize payload in `utils/testPayloadComment`

async function main() {
try {
const octokit = await handleGithubAuth(testPayloadComment)

console.log("Generating comment...")

const comment = await replyIssueComment(testPayloadComment, octokit)

console.log(
"PR-Codex commented:\n\n",
comment,
"\n\nView on Github: https://github.com/decentralizedlabs/pr-codex/pull/4"
)
} catch (error) {
console.log(error)
}
}

main()
.then(() => process.exit(0))
.catch((error) => {
console.error(error)
process.exit(1)
})
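
`utils/github/testPayloadComment` is not shown in this diff. For local testing it presumably mirrors a GitHub issue_comment webhook payload; a hypothetical stand-in, purely for illustration (field values and the installation block are assumptions):

// Hypothetical stand-in for utils/github/testPayloadComment (not part of this commit).
export const testPayloadComment = {
  action: "created",
  comment: { body: "/ask-codex What does this PR change?" },
  repository: { name: "pr-codex", owner: { login: "decentralizedlabs" } },
  issue: { number: 4 },
  sender: { login: "jjranalli" },
  // handleGithubAuth likely authenticates as a GitHub App installation (assumption).
  installation: { id: 12345678 }
}
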
10 changes: 5 additions & 5 deletions scripts/summarize.ts
@@ -1,19 +1,19 @@
import dotenv from "dotenv"
import { handleGithubAuth } from "../lib/handleGithubAuth"
import { summarizePullRequest } from "../lib/summarizePullRequest"
import { testPayload } from "../utils/github/testPayload"
import { testPayloadSyncPr } from "../utils/github/testPayloadSyncPr"

dotenv.config()

// Customize payload in `utils/testPayload`
// Customize payload in `utils/testPayloadSyncPr`

async function main() {
const octokit = await handleGithubAuth(testPayload)

try {
const octokit = await handleGithubAuth(testPayloadSyncPr)

console.log("Generating summary...")

const summary = await summarizePullRequest(testPayload, octokit)
const summary = await summarizePullRequest(testPayloadSyncPr, octokit)

console.log(
"PR-Codex wrote:\n\n",
41 changes: 41 additions & 0 deletions utils/generateChatGpt.ts
@@ -0,0 +1,41 @@
import { ChatCompletionRequestMessage, OpenAI } from "openai-streams"
import { yieldStream } from "yield-stream"

export const generateChatGpt = async (
messages: ChatCompletionRequestMessage[]
) => {
const DECODER = new TextDecoder()
let text = ""

try {
const stream = await OpenAI(
"chat",
{
model: "gpt-3.5-turbo",
temperature: 0.7,
messages
},
{ apiKey: process.env.OPENAI_API_KEY }
)

for await (const chunk of yieldStream(stream)) {
try {
const decoded: string = DECODER.decode(chunk)

if (decoded === undefined)
throw new Error(
"No choices in response. Decoded response: " +
JSON.stringify(decoded)
)

text += decoded
} catch (err) {
console.error(err)
}
}
} catch (err) {
console.error(err)
}

return text
}
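
A minimal usage sketch of the extracted helper, assuming OPENAI_API_KEY is set in the environment (messages are made up; the import path assumes the repository root):

import { ChatCompletionRequestMessage } from "openai-streams"
import { generateChatGpt } from "./utils/generateChatGpt"

async function example() {
  const messages: ChatCompletionRequestMessage[] = [
    { role: "system", content: "You are a Git diff assistant. Be concise." },
    { role: "user", content: "Summarize this diff:\n\n- old line\n+ new line" }
  ]

  // Streams the completion internally and resolves with the accumulated text.
  // On API errors the helper logs them and returns whatever text was received.
  const text = await generateChatGpt(messages)
  console.log(text)
}
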

1 comment on commit 6ebf83c

@vercel bot commented on 6ebf83c Apr 16, 2023