Skip to content

Commit

Permalink
Get initial API up and running
Browse files Browse the repository at this point in the history
  • Loading branch information
chand1012 committed Jul 15, 2021
1 parent a6f189c commit 27dc9e5
Show file tree
Hide file tree
Showing 6 changed files with 84 additions and 3 deletions.
11 changes: 11 additions & 0 deletions Dockerfile.api-cpu
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# CPU-only API image: official TensorFlow 2.5.0 base (no GPU/CUDA layers).
FROM tensorflow/tensorflow:2.5.0

# UTF-8 locale so Python text I/O handles non-ASCII prompts/outputs.
ENV LANG=C.UTF-8
RUN mkdir /gpt-2
WORKDIR /gpt-2
# Copy the whole repo into the image (code, requirements, download script).
ADD . /gpt-2
RUN pip3 install -r requirements-api.txt
# Bake the two checkpoints into the image; per the API code, only
# 124M and 355M are usable on the CPU build.
RUN python3 download_model.py 124M
RUN python3 download_model.py 355M
# Serve the FastAPI app with uvicorn on all interfaces, port 5000.
# NOTE(review): CMD references "main:app" but the handlers live in api.py — confirm module name.
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "5000"]
EXPOSE 5000
45 changes: 45 additions & 0 deletions api.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
from typing import Optional

from fastapi import FastAPI

import src.generate as generate
# from api_models import ResponseModel, RequestModel

# FastAPI application object; the route handlers below register against it,
# and the Docker image serves it via uvicorn on port 5000.
app = FastAPI()

@app.get('/')
def index():
    """Root endpoint: fixed hello-world payload, usable as a liveness check."""
    greeting = {'message': 'Hello World!'}
    return greeting

# @app.get('/samples', response_class=ResponseModel)
@app.get('/samples')
def samples_get(
    prompt: str,
    nsamples: Optional[int] = 1,
    model_name: Optional[str] = '124M', # Only 124M and 355M work on CPU API
    batch_size: Optional[int] = 1,
    seed: Optional[float] = None,
    length: Optional[int] = None,
    top_k: Optional[int] = 40,
    temperature: Optional[float] = 1.0
):
    """Generate text completions for `prompt` with the selected GPT-2 checkpoint.

    Every tuning knob is an optional query parameter; all of them are
    forwarded positionally to `generate.samples` in the order that function
    expects. The response echoes the prompt alongside the generated texts.
    """
    completions = generate.samples(
        prompt,
        model_name,
        seed,
        nsamples,
        batch_size,
        length,
        temperature,
        top_k,
    )
    return {'prompt': prompt, 'responses': completions}

# @app.post('/samples', response_class=ResponseModel)
# def samples_post(data: RequestModel):

# output = generate.samples(
# data.prompt,
# data.model_name,
# data.seed,
# data.nsamples,
# data.batch_size,
# data.length,
# data.temperature,
# data.top_k
# )

# return {'prompt':data.prompt, 'responses':output}
18 changes: 18 additions & 0 deletions api_models/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
from typing import List, Optional
from pydantic import BaseModel

# need to get these working

class ResponseModel(BaseModel):
    """Response body for the /samples endpoints: the echoed prompt plus the generated texts."""
    prompt: str # Echoed prompt
    responses: List[str]  # One generated completion per requested sample

class RequestModel(BaseModel):
    """Request body for the (currently commented-out) POST /samples route.

    Field names and defaults mirror the query parameters of GET /samples
    in api.py; values are forwarded to `generate.samples`.
    """
    prompt: str  # Text prompt to complete
    nsamples: Optional[int] = 1  # How many completions to generate
    model_name: Optional[str] = '124M'  # Checkpoint; per api.py, only 124M/355M work on the CPU API
    batch_size: Optional[int] = 1
    seed: Optional[float] = None  # NOTE(review): RNG seeds are conventionally ints — confirm generate.samples accepts float
    length: Optional[int] = None  # presumably None means "use the model's default length" — TODO confirm
    top_k: Optional[int] = 40
    temperature: Optional[float] = 1.0
Empty file added src/__init__.py
Empty file.
8 changes: 6 additions & 2 deletions src/generate.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,12 @@
import numpy as np
import tensorflow as tf

import model, sample, encoder

try:
import model, encoder, sample
except ModuleNotFoundError:
import src.model as model
import src.encoder as encoder
import src.sample as sample

def samples(
prompt,
Expand Down
5 changes: 4 additions & 1 deletion src/sample.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
import tensorflow as tf

import model
try:
import model
except ModuleNotFoundError:
import src.model as model

def top_k_logits(logits, k):
if k == 0:
Expand Down

0 comments on commit 27dc9e5

Please sign in to comment.