Skip to content

Commit

Permalink
Tests
Browse files · Browse the repository at this point in the history
Closes #5
  • Loading branch information
flakey5 committed Apr 4, 2024
1 parent d6780cf commit f4ef69c
Show file tree
Hide file tree
Showing 23 changed files with 2,343 additions and 15 deletions.
51 changes: 51 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
name: Tests

on:
push:
branches:
- main
pull_request:

jobs:
setup-node-modules:
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- name: Git Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11

- name: Cache Dependencies
uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9
with:
path: |
~/.npm
node_modules/.cache
key: ${{ runner.os }}-npm-${{ hashFiles('**/workflows/test.yml') }}
restore-keys: ${{ runner.os }}-npm-

- name: Install Dependencies
run: npm install

lint:
name: Linting
runs-on: ubuntu-latest
needs: setup-node-modules
steps:
- name: Run Linting
run: npm run lint

unit-tests:
name: Unit Tests
runs-on: ubuntu-latest
needs: setup-node-modules
steps:
- name: Run Tests
run: npm run test:unit

e2e-tests:
name: E2E Tests
runs-on: ubuntu-latest
needs: setup-node-modules
steps:
- name: Run Tests
run: npm run test:e2e
6 changes: 4 additions & 2 deletions ai-providers/open-ai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import { ReadableStream, UnderlyingByteSource, ReadableByteStreamController } fr
import OpenAI from 'openai'
import { AiProvider, NoContentError, StreamChunkCallback } from './provider'
import { ReadableStream as ReadableStreamPolyfill } from 'web-streams-polyfill'
import { fetch } from 'undici'
import { ChatCompletionChunk } from 'openai/resources/index.mjs'
import { AiStreamEvent, encodeEvent } from './event'
import createError from '@fastify/error'
Expand Down Expand Up @@ -86,7 +87,8 @@ export class OpenAiProvider implements AiProvider {

constructor (model: string, apiKey: string) {
this.model = model
this.client = new OpenAI({ apiKey })
// @ts-expect-error
this.client = new OpenAI({ apiKey, fetch })
}

async ask (prompt: string): Promise<string> {
Expand Down Expand Up @@ -118,6 +120,6 @@ export class OpenAiProvider implements AiProvider {
],
stream: true
})
return new ReadableStream(new OpenAiByteSource(response.toReadableStream()))
return new ReadableStream(new OpenAiByteSource(response.toReadableStream(), chunkCallback))
}
}
2 changes: 2 additions & 0 deletions index.d.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import { ReadableStream } from 'node:stream/web'
import { PlatformaticApp } from '@platformatic/service'
import { errorResponseBuilderContext } from '@fastify/rate-limit'
import { AiWarpConfig } from './config'

declare module 'fastify' {
Expand All @@ -23,6 +24,7 @@ declare module 'fastify' {
onExceeded?: (req: FastifyRequest, key: string) => void
}
}
rateLimitMax?: ((req: FastifyRequest, key: string) => number) | ((req: FastifyRequest, key: string) => Promise<number>)
}
}

Expand Down
9 changes: 7 additions & 2 deletions lib/generator.ts
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,11 @@ class AiWarpGenerator extends ServiceGenerator {
const defaultBaseConfig = super.getDefaultConfig()
const defaultConfig = {
aiProvider: 'openai',
aiModel: 'gpt-3.5-turbo'
aiModel: 'gpt-3.5-turbo',
// TODO: temporary fix, when running the typescript files directly
// (in tests) this goes a directory above the actual project. Exposing
// temporarily until I come up with something better
aiWarpPackageJsonPath: join(__dirname, '..', '..', 'package.json')
}
return Object.assign({}, defaultBaseConfig, defaultConfig)
}
Expand Down Expand Up @@ -132,7 +136,8 @@ class AiWarpGenerator extends ServiceGenerator {

async getStackablePackageJson (): Promise<PackageJson> {
if (this._packageJson == null) {
const packageJsonPath = join(__dirname, '..', '..', 'package.json')
// const packageJsonPath = join(__dirname, '..', '..', 'package.json')
const packageJsonPath = this.config.aiWarpPackageJsonPath
const packageJsonFile = await readFile(packageJsonPath, 'utf8')
const packageJson: Partial<PackageJson> = JSON.parse(packageJsonFile)

Expand Down
Loading

0 comments on commit f4ef69c

Please sign in to comment.