Skip to content

Commit

Permalink
use mc to fetch llm from s3
Browse files Browse the repository at this point in the history
  • Loading branch information
avouacr committed Jun 20, 2024
1 parent d8edc4d commit 73fa184
Show file tree
Hide file tree
Showing 2 changed files with 9 additions and 8 deletions.
4 changes: 2 additions & 2 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
# NOTE(review): this is a diff hunk — the first FROM line below is the REMOVED
# base image, the second is the ADDED one (vscode image swapped for the plain
# python-pytorch image, patch bump 3.12.2 -> 3.12.3). Both are pinned tags: good.
FROM inseefrlab/onyxia-vscode-pytorch:py3.12.2-gpu
FROM inseefrlab/onyxia-python-pytorch:py3.12.3-gpu

# NOTE(review): image stays root for the whole build and, apparently, at
# runtime — confirm whether a non-root USER is set further down ("Expand Down"
# hides the rest of the file).
USER root

WORKDIR /app

# Copies the whole build context; a .dockerignore is advisable to keep
# data/models/.git out of the image — TODO confirm one exists in the repo.
COPY . /app/

# NOTE(review): diff pair — requirements file renamed requirements-app.txt ->
# requirements.txt; first RUN line is the removed variant, second the added one.
# chmod is chained in the same layer so the entrypoint is executable in-image.
RUN pip install --no-cache-dir --upgrade -r requirements-app.txt && \
RUN pip install --no-cache-dir --upgrade -r requirements.txt && \
chmod +x entrypoint.sh

# Documentation only — the app inside listens on 8000 (see entrypoint.sh).
EXPOSE 8000
Expand Down
13 changes: 7 additions & 6 deletions entrypoint.sh
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,13 @@
# Configure the MinIO client: mc resolves the alias "s3" from the MC_HOST_s3
# env var. NOTE(review): credentials are embedded in this URL — standard mc
# convention, but make sure the script never runs with `set -x` or echoes it.
export MC_HOST_s3=https://$AWS_ACCESS_KEY_ID:$AWS_SECRET_ACCESS_KEY@$AWS_S3_ENDPOINT
# Pull the pre-built Chroma vector database from the bucket into ./data/chroma_db.
mc cp --recursive s3/$S3_BUCKET/data/chroma_database/chroma_db/ data/chroma_db

# NOTE(review): diff view — the two lines below are the REMOVED variant
# (fetching the model via a Python helper); the mc-based block after them
# is its replacement.
# Cache LLM
python -m src.model_building.fetch_llm_model 2>&1
# Use cached LLM from S3 if available
# Map "org/model" to the Hugging Face hub cache directory name
# "models--org--model" (slashes -> "--", then prefix "models--").
MODEL_NAME_HF=$(echo "$MODEL_NAME" | sed 's|/|--|g' | sed 's|^|models--|')
MODEL_PATH_S3=s3/$S3_BUCKET/models/hf_hub/$MODEL_NAME_HF
# Copy the cached weights into the local HF hub cache only when they exist in
# the bucket; otherwise the app presumably downloads from the hub at startup
# — TODO confirm. NOTE(review): $MODEL_PATH_S3 is unquoted; fine while
# MODEL_NAME has no spaces, but quoting would be safer.
if mc ls $MODEL_PATH_S3 > /dev/null 2>&1; then
echo "Fetching cached LLM $MODEL_NAME from S3."
mc cp --recursive $MODEL_PATH_S3/ $HOME/.cache/huggingface/hub/$MODEL_NAME_HF
fi

# Run app
# NOTE(review): `chainlit run` blocks in the foreground, so if both commands
# below were live the dev one would never execute — in this diff one of the
# two is the removed/added variant; confirm the final script keeps exactly one.
chainlit run app.py --host 0.0.0.0 --port 8000 -h

# Run dev app
chainlit run dev_app.py --host 0.0.0.0 --port 8000 -h -w

0 comments on commit 73fa184

Please sign in to comment.