
Commit

chore(lint): add autofix-python
wsxiaoys committed Nov 28, 2023
1 parent e92a8c8 commit 7c37649
Showing 2 changed files with 37 additions and 9 deletions.
31 changes: 31 additions & 0 deletions .github/workflows/autofix-python.yml
@@ -0,0 +1,31 @@
name: autofix.ci (python)

on:
  pull_request:
    branches: [ "main" ]
    paths:
      - 'python/**'

permissions:
  contents: read

concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.ref_name }}

  # If enabled, this cancels any in-progress run for the group and starts the latest one.
  cancel-in-progress: true

jobs:
  autofix:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: recursive

      - uses: chartboost/ruff-action@v1
        with:
          src: "./python"
          args: --fix

      - uses: autofix-ci/action@d3e591514b99d0fca6779455ff8338516663f7cc
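As an aside, the ruff step above should be roughly equivalent to running the fixer locally against the same directory. A minimal sketch, assuming ruff is installed and invoked through its check subcommand (the src/args values mirror those passed to chartboost/ruff-action):

import subprocess

# Apply ruff's autofixes to everything under ./python, matching the
# workflow's src: "./python" and args: --fix configuration.
subprocess.run(["ruff", "check", "--fix", "./python"], check=True)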
15 changes: 6 additions & 9 deletions python/tabby-eval/modal/predict.py
@@ -1,14 +1,11 @@
import asyncio
import json
import modal
import os
import pandas as pd

from collections import namedtuple
from datetime import datetime
from modal import Image, Mount, Secret, Stub, asgi_app, gpu, method
from pathlib import Path
from typing import Union, List, Optional, Any, Tuple
from modal import Image, Stub, gpu, method
from typing import List, Optional, Tuple


GPU_CONFIG = gpu.A10G()
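The import trimming above is the kind of cleanup ruff's unused-import rule (F401) performs under --fix: names that are imported but never referenced are dropped, and only the ones actually used remain. A small, hypothetical illustration of the before/after:

# Before the fix (hypothetical module): Path and Any are never referenced.
#   from pathlib import Path
#   from typing import Any, List
#
# After `ruff check --fix`, only the used names survive:
from typing import List


def first_line(lines: List[str]) -> str:
    # Uses List, so the typing import is kept.
    return lines[0]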
@@ -61,7 +58,8 @@ class Model:

    def __enter__(self):
        import socket
        import subprocess, os
        import subprocess
        import os
        import time

        from tabby_python_client import Client
@@ -108,12 +106,11 @@ async def health(self):

    @method()
    async def complete(self, language: str, index: int, prompt: str) -> Tuple[int, Optional[str], Optional[str]]:
        from tabby_python_client import errors
        from tabby_python_client.api.v1 import completion
        from tabby_python_client.models import (
            CompletionRequest,
            DebugOptions,
            CompletionResponse,
            Segments,
        )
        from tabby_python_client.types import Response
@@ -127,7 +124,7 @@ async def complete(self, language: str, index: int, prompt: str) -> Tuple[int, Optional[str], Optional[str]]:
            client=self.client, json_body=request
        )

        if resp.parsed != None:
        if resp.parsed is not None:
            return index, resp.parsed.choices[0].text, None
        else:
            return index, None, f"<{resp.status_code}>"
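The final hunk is the E711 fix (comparison to None should use an identity check), which ruff rewrites automatically under --fix. A tiny sketch of the pattern, using a hypothetical stand-in for resp.parsed:

parsed = None  # hypothetical stand-in for resp.parsed

# Flagged by E711: equality comparison against the None singleton.
#   if parsed != None:
#       ...

# Autofixed form: identity check against None.
if parsed is not None:
    print(parsed)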