From 384a1368dd6d119a0daca55a85b0fb5fa2de4c97 Mon Sep 17 00:00:00 2001
From: Huu Le <39040748+leehuwuj@users.noreply.github.com>
Date: Tue, 22 Oct 2024 17:00:52 +0700
Subject: [PATCH] Add mypy import checking and update CI condition (#387)

---
 .changeset/two-masks-design.md                               | 5 +++++
 helpers/providers/anthropic.ts                               | 5 +----
 helpers/providers/azure.ts                                   | 5 +----
 helpers/providers/gemini.ts                                  | 5 +----
 helpers/providers/groq.ts                                    | 5 +----
 helpers/providers/llmhub.ts                                  | 5 +----
 helpers/providers/mistral.ts                                 | 5 +----
 helpers/providers/ollama.ts                                  | 5 +----
 helpers/providers/openai.ts                                  | 5 +----
 helpers/tools.ts                                             | 2 +-
 questions/index.ts                                           | 2 +-
 .../engines/python/agent/tools/document_generator.py         | 2 +-
 templates/components/engines/python/agent/tools/img_gen.py   | 2 +-
 .../components/engines/python/agent/tools/openapi_action.py  | 6 ++++--
 templates/components/engines/python/agent/tools/weather.py   | 5 +++--
 templates/components/routers/python/sandbox.py               | 6 +++---
 .../components/vectordbs/python/llamacloud/generate.py       | 2 +-
 .../types/streaming/fastapi/app/api/routers/__init__.py      | 2 +-
 .../types/streaming/fastapi/app/api/routers/chat_config.py   | 6 ++++--
 .../streaming/fastapi/app/api/routers/vercel_response.py     | 2 +-
 templates/types/streaming/fastapi/app/services/file.py       | 4 ++--
 templates/types/streaming/fastapi/pyproject.toml             | 6 +++++-
 22 files changed, 41 insertions(+), 51 deletions(-)
 create mode 100644 .changeset/two-masks-design.md

diff --git a/.changeset/two-masks-design.md b/.changeset/two-masks-design.md
new file mode 100644
index 000000000..2d3f555c3
--- /dev/null
+++ b/.changeset/two-masks-design.md
@@ -0,0 +1,5 @@
+---
+"create-llama": patch
+---
+
+Fix import error if the artifact tool is selected
diff --git a/helpers/providers/anthropic.ts b/helpers/providers/anthropic.ts
index db7e4f065..080ffdeae 100644
--- a/helpers/providers/anthropic.ts
+++ b/helpers/providers/anthropic.ts
@@ -1,4 +1,3 @@
-import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams } from ".";
 import { questionHandlers, toChoice } from "../../questions/utils";
@@ -70,9 +69,7 @@ export async function askAnthropicQuestions({
     config.apiKey = key || process.env.ANTHROPIC_API_KEY;
   }
 
-  // use default model values in CI or if user should not be asked
-  const useDefaults = ciInfo.isCI || !askModels;
-  if (!useDefaults) {
+  if (askModels) {
     const { model } = await prompts(
       {
         type: "select",
diff --git a/helpers/providers/azure.ts b/helpers/providers/azure.ts
index 28250ecee..8f3a3a710 100644
--- a/helpers/providers/azure.ts
+++ b/helpers/providers/azure.ts
@@ -1,4 +1,3 @@
-import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams, ModelConfigQuestionsParams } from ".";
 import { questionHandlers } from "../../questions/utils";
@@ -67,9 +66,7 @@ export async function askAzureQuestions({
     },
   };
 
-  // use default model values in CI or if user should not be asked
-  const useDefaults = ciInfo.isCI || !askModels;
-  if (!useDefaults) {
+  if (askModels) {
     const { model } = await prompts(
       {
         type: "select",
diff --git a/helpers/providers/gemini.ts b/helpers/providers/gemini.ts
index 50096bded..65b556c4d 100644
--- a/helpers/providers/gemini.ts
+++ b/helpers/providers/gemini.ts
@@ -1,4 +1,3 @@
-import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams } from ".";
 import { questionHandlers, toChoice } from "../../questions/utils";
@@ -54,9 +53,7 @@ export async function askGeminiQuestions({
     config.apiKey = key || process.env.GOOGLE_API_KEY;
   }
 
-  // use default model values in CI or if user should not be asked
-  const useDefaults = ciInfo.isCI || !askModels;
-  if (!useDefaults) {
+  if (askModels) {
     const { model } = await prompts(
       {
         type: "select",
diff --git a/helpers/providers/groq.ts b/helpers/providers/groq.ts
index 33394475f..61b82a5dc 100644
--- a/helpers/providers/groq.ts
+++ b/helpers/providers/groq.ts
@@ -1,4 +1,3 @@
-import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams } from ".";
 import { questionHandlers, toChoice } from "../../questions/utils";
@@ -110,9 +109,7 @@ export async function askGroqQuestions({
     config.apiKey = key || process.env.GROQ_API_KEY;
   }
 
-  // use default model values in CI or if user should not be asked
-  const useDefaults = ciInfo.isCI || !askModels;
-  if (!useDefaults) {
+  if (askModels) {
     const modelChoices = await getAvailableModelChoicesGroq(config.apiKey!);
 
     const { model } = await prompts(
diff --git a/helpers/providers/llmhub.ts b/helpers/providers/llmhub.ts
index 0e4a610ee..531e5e431 100644
--- a/helpers/providers/llmhub.ts
+++ b/helpers/providers/llmhub.ts
@@ -1,4 +1,3 @@
-import ciInfo from "ci-info";
 import got from "got";
 import ora from "ora";
 import { red } from "picocolors";
@@ -80,9 +79,7 @@ export async function askLLMHubQuestions({
     config.apiKey = key || process.env.T_SYSTEMS_LLMHUB_API_KEY;
   }
 
-  // use default model values in CI or if user should not be asked
-  const useDefaults = ciInfo.isCI || !askModels;
-  if (!useDefaults) {
+  if (askModels) {
     const { model } = await prompts(
       {
         type: "select",
diff --git a/helpers/providers/mistral.ts b/helpers/providers/mistral.ts
index c040b412d..1b11ae544 100644
--- a/helpers/providers/mistral.ts
+++ b/helpers/providers/mistral.ts
@@ -1,4 +1,3 @@
-import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams } from ".";
 import { questionHandlers, toChoice } from "../../questions/utils";
@@ -53,9 +52,7 @@ export async function askMistralQuestions({
     config.apiKey = key || process.env.MISTRAL_API_KEY;
   }
 
-  // use default model values in CI or if user should not be asked
-  const useDefaults = ciInfo.isCI || !askModels;
-  if (!useDefaults) {
+  if (askModels) {
     const { model } = await prompts(
       {
         type: "select",
diff --git a/helpers/providers/ollama.ts b/helpers/providers/ollama.ts
index cdcbcce64..b9c797e0e 100644
--- a/helpers/providers/ollama.ts
+++ b/helpers/providers/ollama.ts
@@ -1,4 +1,3 @@
-import ciInfo from "ci-info";
 import ollama, { type ModelResponse } from "ollama";
 import { red } from "picocolors";
 import prompts from "prompts";
@@ -34,9 +33,7 @@ export async function askOllamaQuestions({
     },
   };
 
-  // use default model values in CI or if user should not be asked
-  const useDefaults = ciInfo.isCI || !askModels;
-  if (!useDefaults) {
+  if (askModels) {
     const { model } = await prompts(
       {
         type: "select",
diff --git a/helpers/providers/openai.ts b/helpers/providers/openai.ts
index 6243f5b56..74f9a42bc 100644
--- a/helpers/providers/openai.ts
+++ b/helpers/providers/openai.ts
@@ -1,4 +1,3 @@
-import ciInfo from "ci-info";
 import got from "got";
 import ora from "ora";
 import { red } from "picocolors";
@@ -54,9 +53,7 @@ export async function askOpenAIQuestions({
     config.apiKey = key || process.env.OPENAI_API_KEY;
   }
 
-  // use default model values in CI or if user should not be asked
-  const useDefaults = ciInfo.isCI || !askModels;
-  if (!useDefaults) {
+  if (askModels) {
     const { model } = await prompts(
       {
         type: "select",
diff --git a/helpers/tools.ts b/helpers/tools.ts
index 262e71b1d..5199f244d 100644
--- a/helpers/tools.ts
+++ b/helpers/tools.ts
@@ -170,7 +170,7 @@ For better results, you can specify the region parameter to get results from a s
     dependencies: [
       {
         name: "e2b_code_interpreter",
-        version: "^0.0.11b38",
+        version: "0.0.11b38",
       },
     ],
     supportedFrameworks: ["fastapi", "express", "nextjs"],
diff --git a/questions/index.ts b/questions/index.ts
index 03b984e36..7de3c9f65 100644
--- a/questions/index.ts
+++ b/questions/index.ts
@@ -7,7 +7,7 @@ import { QuestionArgs, QuestionResults } from "./types";
 export const askQuestions = async (
   args: QuestionArgs,
 ): Promise<QuestionResults> => {
-  if (ciInfo.isCI) {
+  if (ciInfo.isCI || process.env.PLAYWRIGHT_TEST === "1") {
     return await getCIQuestionResults(args);
   } else if (args.pro) {
     // TODO: refactor pro questions to return a result object
diff --git a/templates/components/engines/python/agent/tools/document_generator.py b/templates/components/engines/python/agent/tools/document_generator.py
index 5609f1467..b97b92a9f 100644
--- a/templates/components/engines/python/agent/tools/document_generator.py
+++ b/templates/components/engines/python/agent/tools/document_generator.py
@@ -105,7 +105,7 @@ def _generate_html_content(cls, original_content: str) -> str:
         Generate HTML content from the original markdown content.
         """
         try:
-            import markdown
+            import markdown  # type: ignore
         except ImportError:
             raise ImportError(
                 "Failed to import required modules. Please install markdown."
diff --git a/templates/components/engines/python/agent/tools/img_gen.py b/templates/components/engines/python/agent/tools/img_gen.py
index 8c2ae7bc0..17cf2d4ae 100644
--- a/templates/components/engines/python/agent/tools/img_gen.py
+++ b/templates/components/engines/python/agent/tools/img_gen.py
@@ -3,7 +3,7 @@
 import uuid
 from typing import Optional
 
-import requests
+import requests  # type: ignore
 from llama_index.core.tools import FunctionTool
 from pydantic import BaseModel, Field
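
A note on the pattern above: with mypy import checking enabled, third-party packages that ship no type stubs (markdown, requests, and so on) need an explicit "# type: ignore" on the import line. A minimal sketch of the optional-dependency variant used in document_generator.py; render_markdown is a stand-in name, the rest mirrors the hunk above:

    def render_markdown(original_content: str) -> str:
        try:
            # The package ships without type stubs, so mypy needs the
            # "type: ignore"; the except branch still gives users an
            # actionable install hint at runtime.
            import markdown  # type: ignore
        except ImportError:
            raise ImportError(
                "Failed to import required modules. Please install markdown."
            )
        return markdown.markdown(original_content)
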
""" - import yaml from urllib.parse import urlparse + import yaml # type: ignore + if uri.startswith("http"): - import requests + import requests # type: ignore response = requests.get(uri) if response.status_code != 200: diff --git a/templates/components/engines/python/agent/tools/weather.py b/templates/components/engines/python/agent/tools/weather.py index c8b6f1b4c..981f3771a 100644 --- a/templates/components/engines/python/agent/tools/weather.py +++ b/templates/components/engines/python/agent/tools/weather.py @@ -1,8 +1,9 @@ """Open Meteo weather map tool spec.""" import logging -import requests -import pytz + +import pytz # type: ignore +import requests # type: ignore from llama_index.core.tools import FunctionTool logger = logging.getLogger(__name__) diff --git a/templates/components/routers/python/sandbox.py b/templates/components/routers/python/sandbox.py index 0b07422ee..28c0c3f19 100644 --- a/templates/components/routers/python/sandbox.py +++ b/templates/components/routers/python/sandbox.py @@ -20,8 +20,8 @@ from typing import Any, Dict, List, Optional, Union from app.engine.tools.artifact import CodeArtifact -from app.engine.utils.file_helper import save_file -from e2b_code_interpreter import CodeInterpreter, Sandbox +from app.services.file import FileService +from e2b_code_interpreter import CodeInterpreter, Sandbox # type: ignore from fastapi import APIRouter, HTTPException, Request from pydantic import BaseModel @@ -175,7 +175,7 @@ def _download_cell_results(cell_results: Optional[List]) -> List[Dict[str, str]] base64_data = data buffer = base64.b64decode(base64_data) file_name = f"{uuid.uuid4()}.{ext}" - file_meta = save_file( + file_meta = FileService.save_file( content=buffer, file_name=file_name, save_dir=os.path.join("output", "tools"), diff --git a/templates/components/vectordbs/python/llamacloud/generate.py b/templates/components/vectordbs/python/llamacloud/generate.py index 6be271bd5..acd28777e 100644 --- a/templates/components/vectordbs/python/llamacloud/generate.py +++ b/templates/components/vectordbs/python/llamacloud/generate.py @@ -8,7 +8,7 @@ import logging from app.engine.index import get_client, get_index -from app.engine.service import LLamaCloudFileService +from app.engine.service import LLamaCloudFileService # type: ignore from app.settings import init_settings from llama_cloud import PipelineType from llama_index.core.readers import SimpleDirectoryReader diff --git a/templates/types/streaming/fastapi/app/api/routers/__init__.py b/templates/types/streaming/fastapi/app/api/routers/__init__.py index 8c897aa51..6c2654e5f 100644 --- a/templates/types/streaming/fastapi/app/api/routers/__init__.py +++ b/templates/types/streaming/fastapi/app/api/routers/__init__.py @@ -11,7 +11,7 @@ # Dynamically adding additional routers if they exist try: - from .sandbox import sandbox_router # noqa: F401 + from .sandbox import sandbox_router # type: ignore api_router.include_router(sandbox_router, prefix="/sandbox") except ImportError: diff --git a/templates/types/streaming/fastapi/app/api/routers/chat_config.py b/templates/types/streaming/fastapi/app/api/routers/chat_config.py index 495ee99be..e3cacfc93 100644 --- a/templates/types/streaming/fastapi/app/api/routers/chat_config.py +++ b/templates/types/streaming/fastapi/app/api/routers/chat_config.py @@ -12,7 +12,9 @@ def _is_llama_cloud_service_configured(): try: - from app.engine.service import LLamaCloudFileService # noqa + from app.engine.service import ( + LLamaCloudFileService, # type: ignore # noqa: F401 + ) return True 
diff --git a/templates/types/streaming/fastapi/app/api/routers/chat_config.py b/templates/types/streaming/fastapi/app/api/routers/chat_config.py
index 495ee99be..e3cacfc93 100644
--- a/templates/types/streaming/fastapi/app/api/routers/chat_config.py
+++ b/templates/types/streaming/fastapi/app/api/routers/chat_config.py
@@ -12,7 +12,9 @@
 
 def _is_llama_cloud_service_configured():
     try:
-        from app.engine.service import LLamaCloudFileService  # noqa
+        from app.engine.service import (
+            LLamaCloudFileService,  # type: ignore # noqa: F401
+        )
 
         return True
     except ImportError:
@@ -20,7 +22,7 @@
 
 
 async def chat_llama_cloud_config():
-    from app.engine.service import LLamaCloudFileService
+    from app.engine.service import LLamaCloudFileService  # type: ignore
 
     if not os.getenv("LLAMA_CLOUD_API_KEY"):
         raise HTTPException(
diff --git a/templates/types/streaming/fastapi/app/api/routers/vercel_response.py b/templates/types/streaming/fastapi/app/api/routers/vercel_response.py
index fc5f03e03..1155f6ba7 100644
--- a/templates/types/streaming/fastapi/app/api/routers/vercel_response.py
+++ b/templates/types/streaming/fastapi/app/api/routers/vercel_response.py
@@ -138,7 +138,7 @@ def _process_response_nodes(
 ):
     try:
         # Start background tasks to download documents from LlamaCloud if needed
-        from app.engine.service import LLamaCloudFileService
+        from app.engine.service import LLamaCloudFileService  # type: ignore
 
         LLamaCloudFileService.download_files_from_nodes(
             source_nodes, background_tasks
diff --git a/templates/types/streaming/fastapi/app/services/file.py b/templates/types/streaming/fastapi/app/services/file.py
index 02e008441..a551ea5f1 100644
--- a/templates/types/streaming/fastapi/app/services/file.py
+++ b/templates/types/streaming/fastapi/app/services/file.py
@@ -241,7 +241,7 @@ def _add_file_to_llama_cloud_index(
         LlamaCloudIndex is a managed index so we can directly use the files.
     """
     try:
-        from app.engine.service import LLamaCloudFileService
+        from app.engine.service import LLamaCloudFileService  # type: ignore
     except ImportError as e:
         raise ValueError("LlamaCloudFileService is not found") from e
 
@@ -287,7 +287,7 @@ def _default_file_loaders_map():
 
 def _get_available_tools() -> Dict[str, List[FunctionTool]]:
     try:
-        from app.engine.tools import ToolFactory
+        from app.engine.tools import ToolFactory  # type: ignore
     except ImportError:
         logger.warning("ToolFactory not found, no tools will be available")
         return {}
diff --git a/templates/types/streaming/fastapi/pyproject.toml b/templates/types/streaming/fastapi/pyproject.toml
index 6a8941607..400991c52 100644
--- a/templates/types/streaming/fastapi/pyproject.toml
+++ b/templates/types/streaming/fastapi/pyproject.toml
@@ -36,4 +36,8 @@ ignore_missing_imports = true
 follow_imports = "silent"
 implicit_optional = true
 strict_optional = false
-disable_error_code = ["return-value", "import-untyped", "assignment"]
+disable_error_code = ["return-value", "assignment"]
+
+[[tool.mypy.overrides]]
+module = "app.*"
+ignore_missing_imports = false
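
Net effect of the pyproject.toml change: the global ignore_missing_imports = true still covers third-party packages, while the new [[tool.mypy.overrides]] block re-enables import resolution for first-party app.* modules, so a template combination that leaves an import dangling now fails mypy instead of failing at runtime. Dropping "import-untyped" from disable_error_code points the same way, which is what the "type: ignore" markers added throughout this patch accommodate. A hypothetical illustration of what the stricter first-party check catches (the misspelled module name is invented):

    # Resolves and is now fully type-checked: a real first-party module.
    from app.engine.tools import ToolFactory

    # Does not resolve: with ignore_missing_imports = false for "app.*",
    # mypy reports something like
    #   error: Cannot find implementation or library stub for module named "app.engine.toolz"
    import app.engine.toolz  # hypothetical misspelling
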