Commit 384a136

Add mypy checker for importing and update CI condition (#387)

leehuwuj authored Oct 22, 2024
1 parent 189c0e3 commit 384a136
Showing 22 changed files with 41 additions and 51 deletions.
5 changes: 5 additions & 0 deletions .changeset/two-masks-design.md
@@ -0,0 +1,5 @@
---
"create-llama": patch
---

Fix import error if the artifact tool is selected
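
For context on the fix: the artifact tool's Python code depends on optional packages (for example e2b_code_interpreter) that are not installed in every generated project, so the templates load such features through guarded imports. The sketch below is a minimal, self-contained version of the pattern that also appears in the routers module further down in this diff; treat it as an illustration of the technique, not the template's exact code.

# Sketch of a package __init__.py that registers an optional router.
from fastapi import APIRouter

api_router = APIRouter()

try:
    # Only importable when the optional artifact/sandbox dependencies are installed.
    from .sandbox import sandbox_router  # type: ignore

    api_router.include_router(sandbox_router, prefix="/sandbox")
except ImportError:
    # The sandbox routes are simply skipped when the dependency is missing.
    pass

The # type: ignore markers added throughout this commit keep the new mypy import check from failing on these conditionally available modules.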
5 changes: 1 addition & 4 deletions helpers/providers/anthropic.ts
@@ -1,4 +1,3 @@
import ciInfo from "ci-info";
import prompts from "prompts";
import { ModelConfigParams } from ".";
import { questionHandlers, toChoice } from "../../questions/utils";
@@ -70,9 +69,7 @@ export async function askAnthropicQuestions({
config.apiKey = key || process.env.ANTHROPIC_API_KEY;
}

// use default model values in CI or if user should not be asked
const useDefaults = ciInfo.isCI || !askModels;
if (!useDefaults) {
if (askModels) {
const { model } = await prompts(
{
type: "select",
5 changes: 1 addition & 4 deletions helpers/providers/azure.ts
@@ -1,4 +1,3 @@
import ciInfo from "ci-info";
import prompts from "prompts";
import { ModelConfigParams, ModelConfigQuestionsParams } from ".";
import { questionHandlers } from "../../questions/utils";
@@ -67,9 +66,7 @@ export async function askAzureQuestions({
},
};

// use default model values in CI or if user should not be asked
const useDefaults = ciInfo.isCI || !askModels;
if (!useDefaults) {
if (askModels) {
const { model } = await prompts(
{
type: "select",
5 changes: 1 addition & 4 deletions helpers/providers/gemini.ts
@@ -1,4 +1,3 @@
import ciInfo from "ci-info";
import prompts from "prompts";
import { ModelConfigParams } from ".";
import { questionHandlers, toChoice } from "../../questions/utils";
@@ -54,9 +53,7 @@ export async function askGeminiQuestions({
config.apiKey = key || process.env.GOOGLE_API_KEY;
}

// use default model values in CI or if user should not be asked
const useDefaults = ciInfo.isCI || !askModels;
if (!useDefaults) {
if (askModels) {
const { model } = await prompts(
{
type: "select",
5 changes: 1 addition & 4 deletions helpers/providers/groq.ts
@@ -1,4 +1,3 @@
import ciInfo from "ci-info";
import prompts from "prompts";
import { ModelConfigParams } from ".";
import { questionHandlers, toChoice } from "../../questions/utils";
@@ -110,9 +109,7 @@ export async function askGroqQuestions({
config.apiKey = key || process.env.GROQ_API_KEY;
}

// use default model values in CI or if user should not be asked
const useDefaults = ciInfo.isCI || !askModels;
if (!useDefaults) {
if (askModels) {
const modelChoices = await getAvailableModelChoicesGroq(config.apiKey!);

const { model } = await prompts(
5 changes: 1 addition & 4 deletions helpers/providers/llmhub.ts
@@ -1,4 +1,3 @@
import ciInfo from "ci-info";
import got from "got";
import ora from "ora";
import { red } from "picocolors";
@@ -80,9 +79,7 @@ export async function askLLMHubQuestions({
config.apiKey = key || process.env.T_SYSTEMS_LLMHUB_API_KEY;
}

// use default model values in CI or if user should not be asked
const useDefaults = ciInfo.isCI || !askModels;
if (!useDefaults) {
if (askModels) {
const { model } = await prompts(
{
type: "select",
5 changes: 1 addition & 4 deletions helpers/providers/mistral.ts
@@ -1,4 +1,3 @@
import ciInfo from "ci-info";
import prompts from "prompts";
import { ModelConfigParams } from ".";
import { questionHandlers, toChoice } from "../../questions/utils";
@@ -53,9 +52,7 @@ export async function askMistralQuestions({
config.apiKey = key || process.env.MISTRAL_API_KEY;
}

// use default model values in CI or if user should not be asked
const useDefaults = ciInfo.isCI || !askModels;
if (!useDefaults) {
if (askModels) {
const { model } = await prompts(
{
type: "select",
5 changes: 1 addition & 4 deletions helpers/providers/ollama.ts
@@ -1,4 +1,3 @@
import ciInfo from "ci-info";
import ollama, { type ModelResponse } from "ollama";
import { red } from "picocolors";
import prompts from "prompts";
@@ -34,9 +33,7 @@ export async function askOllamaQuestions({
},
};

// use default model values in CI or if user should not be asked
const useDefaults = ciInfo.isCI || !askModels;
if (!useDefaults) {
if (askModels) {
const { model } = await prompts(
{
type: "select",
5 changes: 1 addition & 4 deletions helpers/providers/openai.ts
@@ -1,4 +1,3 @@
import ciInfo from "ci-info";
import got from "got";
import ora from "ora";
import { red } from "picocolors";
@@ -54,9 +53,7 @@ export async function askOpenAIQuestions({
config.apiKey = key || process.env.OPENAI_API_KEY;
}

// use default model values in CI or if user should not be asked
const useDefaults = ciInfo.isCI || !askModels;
if (!useDefaults) {
if (askModels) {
const { model } = await prompts(
{
type: "select",
2 changes: 1 addition & 1 deletion helpers/tools.ts
@@ -170,7 +170,7 @@ For better results, you can specify the region parameter to get results from a s
dependencies: [
{
name: "e2b_code_interpreter",
version: "^0.0.11b38",
version: "0.0.11b38",
},
],
supportedFrameworks: ["fastapi", "express", "nextjs"],
2 changes: 1 addition & 1 deletion questions/index.ts
@@ -7,7 +7,7 @@ import { QuestionArgs, QuestionResults } from "./types";
export const askQuestions = async (
args: QuestionArgs,
): Promise<QuestionResults> => {
if (ciInfo.isCI) {
if (ciInfo.isCI || process.env.PLAYWRIGHT_TEST === "1") {
return await getCIQuestionResults(args);
} else if (args.pro) {
// TODO: refactor pro questions to return a result object
@@ -105,7 +105,7 @@ def _generate_html_content(cls, original_content: str) -> str:
Generate HTML content from the original markdown content.
"""
try:
import markdown
import markdown # type: ignore
except ImportError:
raise ImportError(
"Failed to import required modules. Please install markdown."
2 changes: 1 addition & 1 deletion templates/components/engines/python/agent/tools/img_gen.py
@@ -3,7 +3,7 @@
import uuid
from typing import Optional

import requests
import requests # type: ignore
from llama_index.core.tools import FunctionTool
from pydantic import BaseModel, Field

@@ -1,4 +1,5 @@
from typing import Dict, List, Tuple

from llama_index.tools.openapi import OpenAPIToolSpec
from llama_index.tools.requests import RequestsToolSpec

@@ -43,11 +44,12 @@ def _load_openapi_spec(uri: str) -> Tuple[Dict, List[str]]:
Returns:
List[Document]: A list of Document objects.
"""
import yaml
from urllib.parse import urlparse

import yaml # type: ignore

if uri.startswith("http"):
import requests
import requests # type: ignore

response = requests.get(uri)
if response.status_code != 200:
5 changes: 3 additions & 2 deletions templates/components/engines/python/agent/tools/weather.py
@@ -1,8 +1,9 @@
"""Open Meteo weather map tool spec."""

import logging
import requests
import pytz

import pytz # type: ignore
import requests # type: ignore
from llama_index.core.tools import FunctionTool

logger = logging.getLogger(__name__)
6 changes: 3 additions & 3 deletions templates/components/routers/python/sandbox.py
@@ -20,8 +20,8 @@
from typing import Any, Dict, List, Optional, Union

from app.engine.tools.artifact import CodeArtifact
from app.engine.utils.file_helper import save_file
from e2b_code_interpreter import CodeInterpreter, Sandbox
from app.services.file import FileService
from e2b_code_interpreter import CodeInterpreter, Sandbox # type: ignore
from fastapi import APIRouter, HTTPException, Request
from pydantic import BaseModel

@@ -175,7 +175,7 @@ def _download_cell_results(cell_results: Optional[List]) -> List[Dict[str, str]]
base64_data = data
buffer = base64.b64decode(base64_data)
file_name = f"{uuid.uuid4()}.{ext}"
file_meta = save_file(
file_meta = FileService.save_file(
content=buffer,
file_name=file_name,
save_dir=os.path.join("output", "tools"),
@@ -8,7 +8,7 @@
import logging

from app.engine.index import get_client, get_index
from app.engine.service import LLamaCloudFileService
from app.engine.service import LLamaCloudFileService # type: ignore
from app.settings import init_settings
from llama_cloud import PipelineType
from llama_index.core.readers import SimpleDirectoryReader
@@ -11,7 +11,7 @@

# Dynamically adding additional routers if they exist
try:
from .sandbox import sandbox_router # noqa: F401
from .sandbox import sandbox_router # type: ignore

api_router.include_router(sandbox_router, prefix="/sandbox")
except ImportError:
@@ -12,15 +12,17 @@

def _is_llama_cloud_service_configured():
try:
from app.engine.service import LLamaCloudFileService # noqa
from app.engine.service import (
LLamaCloudFileService, # type: ignore # noqa: F401
)

return True
except ImportError:
return False


async def chat_llama_cloud_config():
from app.engine.service import LLamaCloudFileService
from app.engine.service import LLamaCloudFileService # type: ignore

if not os.getenv("LLAMA_CLOUD_API_KEY"):
raise HTTPException(
@@ -138,7 +138,7 @@ def _process_response_nodes(
):
try:
# Start background tasks to download documents from LlamaCloud if needed
from app.engine.service import LLamaCloudFileService
from app.engine.service import LLamaCloudFileService # type: ignore

LLamaCloudFileService.download_files_from_nodes(
source_nodes, background_tasks
4 changes: 2 additions & 2 deletions templates/types/streaming/fastapi/app/services/file.py
@@ -241,7 +241,7 @@ def _add_file_to_llama_cloud_index(
LlamaCloudIndex is a managed index so we can directly use the files.
"""
try:
from app.engine.service import LLamaCloudFileService
from app.engine.service import LLamaCloudFileService # type: ignore
except ImportError as e:
raise ValueError("LlamaCloudFileService is not found") from e

@@ -287,7 +287,7 @@ def _default_file_loaders_map():

def _get_available_tools() -> Dict[str, List[FunctionTool]]:
try:
from app.engine.tools import ToolFactory
from app.engine.tools import ToolFactory # type: ignore
except ImportError:
logger.warning("ToolFactory not found, no tools will be available")
return {}
6 changes: 5 additions & 1 deletion templates/types/streaming/fastapi/pyproject.toml
@@ -36,4 +36,8 @@ ignore_missing_imports = true
follow_imports = "silent"
implicit_optional = true
strict_optional = false
disable_error_code = ["return-value", "import-untyped", "assignment"]
disable_error_code = ["return-value", "assignment"]

[[tool.mypy.overrides]]
module = "app.*"
ignore_missing_imports = false
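
The new [[tool.mypy.overrides]] block turns ignore_missing_imports back off for first-party app.* modules, and the disable_error_code list no longer suppresses import-untyped, so unresolvable imports inside the generated app package are now reported by mypy. A hypothetical illustration of the effect (module and names chosen only for the example):

# app/api/example.py -- hypothetical first-party module used only to illustrate the config.
# With ignore_missing_imports = false for "app.*", mypy reports an error on the next line
# if app.engine.tools (or the imported name) does not exist in the generated project:
from app.engine.tools import ToolFactory  # mypy: Cannot find implementation or library stub

# Third-party packages without type stubs now need an explicit marker, since
# "import-untyped" is no longer disabled -- hence the # type: ignore comments
# added across the templates:
import requests  # type: ignore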
