
Commit

fix types for better inference + adding response model config updates (#57)
roodboi authored Jan 6, 2024
1 parent 13e789a commit de00760
Showing 23 changed files with 95 additions and 94 deletions.
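
Every change in this commit follows the same pattern: response_model now takes a config object with a schema key instead of a bare Zod schema, which is what drives the improved type inference. A minimal sketch of the updated call shape, assuming the @instructor-ai/instructor default export and the same OpenAI/Instructor client setup shown in the diffs below:

import Instructor from "@instructor-ai/instructor"
import OpenAI from "openai"
import { z } from "zod"

const UserSchema = z.object({
  age: z.number(),
  name: z.string()
})

const oai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY ?? undefined,
  organization: process.env.OPENAI_ORG_ID ?? undefined
})

const client = Instructor({
  client: oai,
  mode: "FUNCTIONS"
})

// Old shape (before this commit): the schema was passed directly
//   response_model: UserSchema
// New shape (after this commit): a config object wrapping the schema
const user = await client.chat.completions.create({
  messages: [{ role: "user", content: "Jason Liu is 30 years old" }],
  model: "gpt-3.5-turbo",
  response_model: { schema: UserSchema }
})

console.log(user) // extracted user object, e.g. { age: 30, name: "Jason Liu" }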
2 changes: 1 addition & 1 deletion README.md
@@ -47,7 +47,7 @@ const client = Instructor({
const user: User = await client.chat.completions.create({
messages: [{ role: "user", content: "Jason Liu is 30 years old" }],
model: "gpt-3.5-turbo",
response_model: UserSchema
response_model: { schema: UserSchema }
})

console.log(user)
2 changes: 1 addition & 1 deletion docs/blog/posts/anyscale.md
@@ -61,7 +61,7 @@ const client = Instructor({
const user = await client.chat.completions.create({
messages: [{ role: "user", content: "Harry Potter" }],
model: "mistralai/Mixtral-8x7B-Instruct-v0.1",
response_model: UserSchema,
response_model: { schema: UserSchema },
max_retries: 3
})

2 changes: 1 addition & 1 deletion docs/concepts/streaming.md
@@ -74,7 +74,7 @@ const client = Instructor({
const extractionStream = await client.chat.completions.create({
messages: [{ role: "user", content: textBlock }],
model: "gpt-4-1106-preview",
response_model: ExtractionValuesSchema,
response_model: { schema: ExtractionValuesSchema },
max_retries: 3,
stream: true
})
2 changes: 1 addition & 1 deletion docs/examples/action_items.md
@@ -67,7 +67,7 @@ const extractActionItems = async (data: string): Promise<ActionItems | undefined
},
],
model: "gpt-4-1106-preview",
response_model: ActionItemsSchema,
response_model: { schema: ActionItemsSchema },
max_tokens: 1000,
temperature: 0.0,
max_retries: 2,
4 changes: 2 additions & 2 deletions docs/examples/classification.md
@@ -49,7 +49,7 @@ async function classify(data: string): Promise<SimpleClassification> {
const classification = await client.chat.completions.create({
messages: [{ role: "user", content: `"Classify the following text: ${data}` }],
model: "gpt-3.5-turbo",
response_model: SimpleClassificationSchema,
response_model: { schema: SimpleClassificationSchema },
max_retries: 3
})

@@ -93,7 +93,7 @@ async function multi_classify(data: string): Promise<MultiClassification> {
const classification = await client.chat.completions.create({
messages: [{ role: "user", content: `"Classify the following support ticket: ${data}` }],
model: "gpt-3.5-turbo",
response_model: MultiClassificationSchema,
response_model: { schema: MultiClassificationSchema },
max_retries: 3
})
return classification
2 changes: 1 addition & 1 deletion docs/examples/query_decomposition.md
@@ -70,7 +70,7 @@ const createQueryPlan = async (question: string): Promise<QueryPlan | undefined>
},
],
model: "gpt-4-1106-preview",
response_model: QueryPlanSchema,
response_model: { schema: QueryPlanSchema },
max_tokens: 1000,
temperature: 0.0,
max_retries: 2,
2 changes: 1 addition & 1 deletion docs/index.md
@@ -46,7 +46,7 @@ const client = Instructor({
const user: User = await client.chat.completions.create({
messages: [{ role: "user", content: "Jason Liu is 30 years old" }],
model: "gpt-3.5-turbo",
response_model: UserSchema
response_model: { schema: UserSchema }
})

console.log(user)
12 changes: 5 additions & 7 deletions examples/action_items/index.ts
@@ -23,8 +23,6 @@ const ActionItemsSchema = z.object({
items: z.array(TicketSchema)
})

type ActionItems = z.infer<typeof ActionItemsSchema>

const oai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY ?? undefined,
organization: process.env.OPENAI_ORG_ID ?? undefined
@@ -35,8 +33,8 @@ const client = Instructor({
mode: "TOOLS"
})

const extractActionItems = async (data: string): Promise<ActionItems | undefined> => {
const actionItems: ActionItems = await client.chat.completions.create({
const extractActionItems = async (data: string) => {
const actionItems = await client.chat.completions.create({
messages: [
{
role: "system",
@@ -48,14 +46,14 @@ const extractActionItems = async (data: string): Promise<ActionItems | undefined
}
],
model: "gpt-4-1106-preview",
response_model: ActionItemsSchema,
temperature: 0.3,
response_model: { schema: ActionItemsSchema },
max_tokens: 1000,
temperature: 0.0,
max_retries: 2,
seed: 1
})

return actionItems || undefined
return actionItems
}

const actionItems = await extractActionItems(
8 changes: 3 additions & 5 deletions examples/classification/multi_prediction/index.ts
@@ -13,8 +13,6 @@ const MultiClassificationSchema = z.object({
predicted_labels: z.array(z.nativeEnum(MULTI_CLASSIFICATION_LABELS))
})

type MultiClassification = z.infer<typeof MultiClassificationSchema>

const oai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY ?? undefined,
organization: process.env.OPENAI_ORG_ID ?? undefined
@@ -25,16 +23,16 @@ const client = Instructor({
mode: "TOOLS"
})

const createClassification = async (data: string): Promise<MultiClassification | undefined> => {
const createClassification = async (data: string) => {
const classification = await client.chat.completions.create({
messages: [{ role: "user", content: `"Classify the following support ticket: ${data}` }],
model: "gpt-3.5-turbo",
response_model: MultiClassificationSchema,
response_model: { schema: MultiClassificationSchema },
max_retries: 3,
seed: 1
})

return classification || undefined
return classification
}

const classification = await createClassification(
8 changes: 3 additions & 5 deletions examples/classification/simple_prediction/index.ts
@@ -12,8 +12,6 @@ const SimpleClassificationSchema = z.object({
class_label: z.nativeEnum(CLASSIFICATION_LABELS)
})

type SimpleClassification = z.infer<typeof SimpleClassificationSchema>

const oai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY ?? undefined,
organization: process.env.OPENAI_ORG_ID ?? undefined
@@ -24,16 +22,16 @@ const client = Instructor({
mode: "FUNCTIONS"
})

const createClassification = async (data: string): Promise<SimpleClassification | undefined> => {
const createClassification = async (data: string) => {
const classification = await client.chat.completions.create({
messages: [{ role: "user", content: `"Classify the following text: ${data}` }],
model: "gpt-3.5-turbo",
response_model: SimpleClassificationSchema,
response_model: { schema: SimpleClassificationSchema },
max_retries: 3,
seed: 1
})

return classification || undefined
return classification
}

const classification = await createClassification(
2 changes: 1 addition & 1 deletion examples/extract_user/anyscale.ts
@@ -28,7 +28,7 @@ const client = Instructor({
const user = await client.chat.completions.create({
messages: [{ role: "user", content: "Harry Potter" }],
model: "mistralai/Mixtral-8x7B-Instruct-v0.1",
response_model: UserSchema,
response_model: { schema: UserSchema },
max_retries: 3
})

6 changes: 3 additions & 3 deletions examples/extract_user/index.ts
@@ -7,8 +7,6 @@ const UserSchema = z.object({
name: z.string()
})

type User = z.infer<typeof UserSchema>

const oai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY ?? undefined,
organization: process.env.OPENAI_ORG_ID ?? undefined
@@ -22,7 +20,9 @@ const client = Instructor({
const user = await client.chat.completions.create({
messages: [{ role: "user", content: "Jason Liu is 30 years old" }],
model: "gpt-4",
response_model: UserSchema,
response_model: {
schema: UserSchema
},
max_retries: 3,
seed: 1
})
4 changes: 1 addition & 3 deletions examples/extract_user/properties.ts
@@ -15,8 +15,6 @@ const UserSchema = z.object({
properties: z.array(property)
})

type User = z.infer<typeof UserSchema>

const oai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY ?? undefined,
organization: process.env.OPENAI_ORG_ID ?? undefined
@@ -30,7 +28,7 @@ const client = Instructor({
const user = await client.chat.completions.create({
messages: [{ role: "user", content: "Happy Potter" }],
model: "gpt-4",
response_model: UserSchema,
response_model: { schema: UserSchema },
max_retries: 3,
seed: 1
})
4 changes: 3 additions & 1 deletion examples/extract_user_stream/index.ts
@@ -54,7 +54,9 @@ const client = Instructor({
const extractionStream = await client.chat.completions.create({
messages: [{ role: "user", content: textBlock }],
model: "gpt-4-1106-preview",
response_model: ExtractionValuesSchema,
response_model: {
schema: ExtractionValuesSchema
},
max_retries: 3,
stream: true,
seed: 1
8 changes: 3 additions & 5 deletions examples/knowledge-graph/index.ts
@@ -20,8 +20,6 @@ const KnowledgeGraphSchema = z.object({
edges: z.array(EdgeSchema)
})

type KnowledgeGraph = z.infer<typeof KnowledgeGraphSchema>

const oai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY ?? undefined,
organization: process.env.OPENAI_ORG_ID ?? undefined
@@ -32,16 +30,16 @@ const client = Instructor({
mode: "JSON"
})

const createGraph = async (input: string): Promise<KnowledgeGraph | undefined> => {
const graph: KnowledgeGraph = await client.chat.completions.create({
const createGraph = async (input: string) => {
const graph = await client.chat.completions.create({
messages: [
{
role: "user",
content: `Help me understand following by describing as a detailed knowledge graph: ${input}`
}
],
model: "gpt-3.5-turbo-1106",
response_model: KnowledgeGraphSchema,
response_model: { schema: KnowledgeGraphSchema },
max_retries: 5,
seed: 1
})
10 changes: 4 additions & 6 deletions examples/query_decomposition/index.ts
@@ -15,8 +15,6 @@ const QueryPlanSchema = z.object({
query_graph: z.array(QuerySchema)
})

type QueryPlan = z.infer<typeof QueryPlanSchema>

const oai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY ?? undefined,
organization: process.env.OPENAI_ORG_ID ?? undefined
@@ -27,8 +25,8 @@ const client = Instructor({
mode: "FUNCTIONS"
})

const createQueryPlan = async (question: string): Promise<QueryPlan | undefined> => {
const queryPlan: QueryPlan = await client.chat.completions.create({
const createQueryPlan = async (question: string) => {
const queryPlan = await client.chat.completions.create({
messages: [
{
role: "system",
@@ -41,9 +39,9 @@ const createQueryPlan = async (question: string): Promise<QueryPlan | undefined>
}
],
model: "gpt-4-1106-preview",
response_model: QueryPlanSchema,
response_model: { schema: QueryPlanSchema },
max_tokens: 1000,
temperature: 0.3,
temperature: 0.0,
max_retries: 2,
seed: 1
})
8 changes: 3 additions & 5 deletions examples/resolving-complex-entitities/index.ts
@@ -36,8 +36,6 @@ const DocumentExtractionSchema = z.object({
)
})

type DocumentExtraction = z.infer<typeof DocumentExtractionSchema>

const oai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY ?? undefined,
organization: process.env.OPENAI_ORG_ID ?? undefined
@@ -48,8 +46,8 @@ const client = Instructor({
mode: "TOOLS"
})

const askAi = async (input: string): Promise<DocumentExtraction | undefined> => {
const answer: DocumentExtraction = await client.chat.completions.create({
const askAi = async (input: string) => {
const answer = await client.chat.completions.create({
messages: [
{
role: "system",
@@ -62,7 +60,7 @@ const askAi = async (input: string): Promise<DocumentExtraction | undefined> =>
}
],
model: "gpt-4",
response_model: DocumentExtractionSchema,
response_model: { schema: DocumentExtractionSchema },
max_retries: 3,
seed: 1
})