diff --git a/package.json b/package.json
index e616f40..a849b81 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "use-ai-lib",
-  "version": "0.0.2",
+  "version": "0.0.3-alpha",
   "description": "A React hooks library for building AI-powered apps as simple as possible.",
   "type": "module",
   "source": "src/index.ts",
diff --git a/src/hooks/useAIModel.tsx b/src/hooks/useAIModel.tsx
index 47f9436..57c5722 100644
--- a/src/hooks/useAIModel.tsx
+++ b/src/hooks/useAIModel.tsx
@@ -1,7 +1,7 @@
-import type { DeepPartial, Schema } from "@ai-sdk/ui-utils";
+import type { DeepPartial } from "@ai-sdk/ui-utils";
 import type { LanguageModel } from "ai";
 import { useEffect, useMemo } from "react";
-import type { ZodTypeDef, Schema as zSchema } from "zod";
+import type { z } from "zod";
 import { useModelContext } from "../provider";
 import {
   useGenerateObject,
@@ -15,7 +15,8 @@ interface Options extends Prompt {
   /**
    * The schema of the object that the model should generate. Use 'zod' to declare.
    */
-  schema?: zSchema<OBJECT, ZodTypeDef, OBJECT> | Schema<OBJECT>;
+  // biome-ignore lint/suspicious/noExplicitAny: <explanation>
+  schema?: z.Schema<OBJECT, any, any>;
   /**
    * Streams the output or not.
    */
@@ -24,7 +25,9 @@ interface Options extends Prompt {
    * Do something when AI data is generated.
    * Use this callback to get the data during streaming rather than through the final 'data'.
    */
-  onSuccess?: (data: string | DeepPartial<OBJECT> | OBJECT) => void;
+  onSuccess?: (
+    data: OBJECT extends string ? string : DeepPartial<OBJECT> | OBJECT,
+  ) => void;
 }

 interface UseAIModel {
@@ -94,6 +97,7 @@ export function useAIModel(
       ...prompt,
     },
     {
+      onSuccess: onSuccess as (data: string) => void,
       enabled: !!stream && !schema && !emptyInput,
     },
   );
@@ -104,7 +108,7 @@ export function useAIModel(
     isFetching: isObjectFetching,
     isError: isObjectError,
     error: objectError,
-  } = useGenerateObject(
+  } = useGenerateObject<Exclude<D, string>>(
     {
       model,
       // biome-ignore lint/style/noNonNullAssertion: <explanation>
@@ -121,7 +125,7 @@ export function useAIModel(
     isFetching: isStreamObjectFetching,
     isError: isStreamObjectError,
     error: streamObjectError,
-  } = useStreamObject(
+  } = useStreamObject<Exclude<D, string>>(
     {
       model,
       // biome-ignore lint/style/noNonNullAssertion: <explanation>
@@ -129,7 +133,7 @@ export function useAIModel(
       schema: schema!,
       ...prompt,
     },
     {
-      onSuccess,
+      onSuccess: onSuccess as (data: DeepPartial<D> | D) => void,
       enabled: !!stream && !!schema && !emptyInput,
     },
   );
@@ -140,7 +144,7 @@ export function useAIModel(
   // biome-ignore lint: onSuccess usually won't change
   useEffect(() => {
     if (!data) return;
-    onSuccess?.(data);
+    (onSuccess as (data: string | D) => void)?.(data);
   }, [data]);

   return {
diff --git a/src/queries/useStreamObject.tsx b/src/queries/useStreamObject.tsx
index 739205a..f130880 100644
--- a/src/queries/useStreamObject.tsx
+++ b/src/queries/useStreamObject.tsx
@@ -1,4 +1,4 @@
-import type { Schema } from "@ai-sdk/ui-utils";
+import type { DeepPartial } from "@ai-sdk/ui-utils";
 import { useQuery } from "@tanstack/react-query";
 import { streamObject } from "ai";
 import type { LanguageModel } from "ai";
@@ -6,10 +6,9 @@ import type { ZodTypeDef, Schema as zSchema } from "zod";
 import type { CallSettings, Prompt, TelemetrySettings } from "../types";

 // use some of options
-type Options = {
+type Options<OBJECT> = {
   enabled?: boolean;
-  // biome-ignore lint/suspicious/noExplicitAny: TODO
-  onSuccess?: (data: any) => void;
+  onSuccess?: (data: DeepPartial<OBJECT> | OBJECT) => void;
 };

 // There are three overloads, use this one currently.
@@ -24,7 +23,7 @@ The language model to use.
 The schema of the object that the model should generate.
 */
   // biome-ignore lint/suspicious/noExplicitAny: TODO
-  schema: zSchema<OBJECT, ZodTypeDef, OBJECT> | Schema<OBJECT>;
+  schema: zSchema<OBJECT, ZodTypeDef, OBJECT>;
   /**
 Optional name of the output that should be generated. Used by some providers for additional LLM guidance, e.g.
@@ -64,7 +63,7 @@ Callback that is called when the LLM response and the final object validation ar
 export function useStreamObject<OBJECT>(
   params: StreamObjectParams<OBJECT>,
-  options?: Options,
+  options?: Options<OBJECT>,
 ) {
   const query = useQuery({
     queryKey: ["streamObject", JSON.stringify(params.messages)],