diff --git a/helpers/providers/anthropic.ts b/helpers/providers/anthropic.ts index 080ffdeae..818e3ceb8 100644 --- a/helpers/providers/anthropic.ts +++ b/helpers/providers/anthropic.ts @@ -1,6 +1,6 @@ -import prompts from "prompts"; +import inquirer from "inquirer"; import { ModelConfigParams } from "."; -import { questionHandlers, toChoice } from "../../questions/utils"; +import { toChoice } from "../../questions/utils"; const MODELS = [ "claude-3-opus", @@ -57,41 +57,36 @@ export async function askAnthropicQuestions({ }; if (!config.apiKey) { - const { key } = await prompts( + const { key } = await inquirer.prompt([ { - type: "text", + type: "input", name: "key", message: "Please provide your Anthropic API key (or leave blank to use ANTHROPIC_API_KEY env variable):", }, - questionHandlers, - ); + ]); config.apiKey = key || process.env.ANTHROPIC_API_KEY; } if (askModels) { - const { model } = await prompts( + const { model } = await inquirer.prompt([ { - type: "select", + type: "list", name: "model", message: "Which LLM model would you like to use?", choices: MODELS.map(toChoice), - initial: 0, }, - questionHandlers, - ); + ]); config.model = model; - const { embeddingModel } = await prompts( + const { embeddingModel } = await inquirer.prompt([ { - type: "select", + type: "list", name: "embeddingModel", message: "Which embedding model would you like to use?", choices: Object.keys(EMBEDDING_MODELS).map(toChoice), - initial: 0, }, - questionHandlers, - ); + ]); config.embeddingModel = embeddingModel; config.dimensions = EMBEDDING_MODELS[ diff --git a/helpers/providers/azure.ts b/helpers/providers/azure.ts index 8f3a3a710..2c57aad99 100644 --- a/helpers/providers/azure.ts +++ b/helpers/providers/azure.ts @@ -1,6 +1,5 @@ -import prompts from "prompts"; +import inquirer from "inquirer"; import { ModelConfigParams, ModelConfigQuestionsParams } from "."; -import { questionHandlers } from "../../questions/utils"; const ALL_AZURE_OPENAI_CHAT_MODELS: Record = { 
"gpt-35-turbo": { openAIModel: "gpt-3.5-turbo" }, @@ -67,28 +66,24 @@ export async function askAzureQuestions({ }; if (askModels) { - const { model } = await prompts( + const { model } = await inquirer.prompt([ { - type: "select", + type: "list", name: "model", message: "Which LLM model would you like to use?", choices: getAvailableModelChoices(), - initial: 0, }, - questionHandlers, - ); + ]); config.model = model; - const { embeddingModel } = await prompts( + const { embeddingModel } = await inquirer.prompt([ { - type: "select", + type: "list", name: "embeddingModel", message: "Which embedding model would you like to use?", choices: getAvailableEmbeddingModelChoices(), - initial: 0, }, - questionHandlers, - ); + ]); config.embeddingModel = embeddingModel; config.dimensions = getDimensions(embeddingModel); } @@ -98,14 +93,14 @@ export async function askAzureQuestions({ function getAvailableModelChoices() { return Object.keys(ALL_AZURE_OPENAI_CHAT_MODELS).map((key) => ({ - title: key, + name: key, value: key, })); } function getAvailableEmbeddingModelChoices() { return Object.keys(ALL_AZURE_OPENAI_EMBEDDING_MODELS).map((key) => ({ - title: key, + name: key, value: key, })); } diff --git a/helpers/providers/gemini.ts b/helpers/providers/gemini.ts index 65b556c4d..e5e7ae3fb 100644 --- a/helpers/providers/gemini.ts +++ b/helpers/providers/gemini.ts @@ -1,6 +1,6 @@ -import prompts from "prompts"; +import inquirer from "inquirer"; import { ModelConfigParams } from "."; -import { questionHandlers, toChoice } from "../../questions/utils"; +import { toChoice } from "../../questions/utils"; const MODELS = ["gemini-1.5-pro-latest", "gemini-pro", "gemini-pro-vision"]; type ModelData = { @@ -41,41 +41,36 @@ export async function askGeminiQuestions({ }; if (!config.apiKey) { - const { key } = await prompts( + const { key } = await inquirer.prompt([ { - type: "text", + type: "input", name: "key", message: "Please provide your Google API key (or leave blank to use GOOGLE_API_KEY 
env variable):", }, - questionHandlers, - ); + ]); config.apiKey = key || process.env.GOOGLE_API_KEY; } if (askModels) { - const { model } = await prompts( + const { model } = await inquirer.prompt([ { - type: "select", + type: "list", name: "model", message: "Which LLM model would you like to use?", choices: MODELS.map(toChoice), - initial: 0, }, - questionHandlers, - ); + ]); config.model = model; - const { embeddingModel } = await prompts( + const { embeddingModel } = await inquirer.prompt([ { - type: "select", + type: "list", name: "embeddingModel", message: "Which embedding model would you like to use?", choices: Object.keys(EMBEDDING_MODELS).map(toChoice), - initial: 0, }, - questionHandlers, - ); + ]); config.embeddingModel = embeddingModel; config.dimensions = EMBEDDING_MODELS[embeddingModel].dimensions; } diff --git a/helpers/providers/groq.ts b/helpers/providers/groq.ts index 61b82a5dc..8ac010a49 100644 --- a/helpers/providers/groq.ts +++ b/helpers/providers/groq.ts @@ -1,6 +1,6 @@ -import prompts from "prompts"; +import inquirer from "inquirer"; import { ModelConfigParams } from "."; -import { questionHandlers, toChoice } from "../../questions/utils"; +import { toChoice } from "../../questions/utils"; import got from "got"; import ora from "ora"; @@ -97,43 +97,38 @@ export async function askGroqQuestions({ }; if (!config.apiKey) { - const { key } = await prompts( + const { key } = await inquirer.prompt([ { - type: "text", + type: "input", name: "key", message: "Please provide your Groq API key (or leave blank to use GROQ_API_KEY env variable):", }, - questionHandlers, - ); + ]); config.apiKey = key || process.env.GROQ_API_KEY; } if (askModels) { const modelChoices = await getAvailableModelChoicesGroq(config.apiKey!); - const { model } = await prompts( + const { model } = await inquirer.prompt([ { - type: "select", + type: "list", name: "model", message: "Which LLM model would you like to use?", choices: modelChoices, - initial: 0, }, - questionHandlers, 
- ); + ]); config.model = model; - const { embeddingModel } = await prompts( + const { embeddingModel } = await inquirer.prompt([ { - type: "select", + type: "list", name: "embeddingModel", message: "Which embedding model would you like to use?", choices: Object.keys(EMBEDDING_MODELS).map(toChoice), - initial: 0, }, - questionHandlers, - ); + ]); config.embeddingModel = embeddingModel; config.dimensions = EMBEDDING_MODELS[ diff --git a/helpers/providers/huggingface.ts b/helpers/providers/huggingface.ts index 039b8e323..22485214e 100644 --- a/helpers/providers/huggingface.ts +++ b/helpers/providers/huggingface.ts @@ -1,6 +1,6 @@ -import prompts from "prompts"; +import inquirer from "inquirer"; import { ModelConfigParams } from "."; -import { questionHandlers, toChoice } from "../../questions/utils"; +import { toChoice } from "../../questions/utils"; const MODELS = ["HuggingFaceH4/zephyr-7b-alpha"]; type ModelData = { @@ -38,28 +38,24 @@ export async function askHuggingfaceQuestions({ }; if (askModels) { - const { model } = await prompts( + const { model } = await inquirer.prompt([ { - type: "select", + type: "list", name: "model", message: "Which Hugging Face model would you like to use?", choices: MODELS.map(toChoice), - initial: 0, }, - questionHandlers, - ); + ]); config.model = model; - const { embeddingModel } = await prompts( + const { embeddingModel } = await inquirer.prompt([ { - type: "select", + type: "list", name: "embeddingModel", message: "Which embedding model would you like to use?", choices: Object.keys(EMBEDDING_MODELS).map(toChoice), - initial: 0, }, - questionHandlers, - ); + ]); config.embeddingModel = embeddingModel; config.dimensions = EMBEDDING_MODELS[embeddingModel].dimensions; } diff --git a/helpers/providers/index.ts b/helpers/providers/index.ts index a75302981..a19597d45 100644 --- a/helpers/providers/index.ts +++ b/helpers/providers/index.ts @@ -1,5 +1,4 @@ -import prompts from "prompts"; -import { questionHandlers } from 
"../../questions/utils"; +import inquirer from "inquirer"; import { ModelConfig, ModelProvider, TemplateFramework } from "../types"; import { askAnthropicQuestions } from "./anthropic"; import { askAzureQuestions } from "./azure"; @@ -29,29 +28,27 @@ export async function askModelConfig({ let modelProvider: ModelProvider = DEFAULT_MODEL_PROVIDER; if (askModels) { let choices = [ - { title: "OpenAI", value: "openai" }, - { title: "Groq", value: "groq" }, - { title: "Ollama", value: "ollama" }, - { title: "Anthropic", value: "anthropic" }, - { title: "Gemini", value: "gemini" }, - { title: "Mistral", value: "mistral" }, - { title: "AzureOpenAI", value: "azure-openai" }, + { name: "OpenAI", value: "openai" }, + { name: "Groq", value: "groq" }, + { name: "Ollama", value: "ollama" }, + { name: "Anthropic", value: "anthropic" }, + { name: "Gemini", value: "gemini" }, + { name: "Mistral", value: "mistral" }, + { name: "AzureOpenAI", value: "azure-openai" }, ]; if (framework === "fastapi") { - choices.push({ title: "T-Systems", value: "t-systems" }); - choices.push({ title: "Huggingface", value: "huggingface" }); + choices.push({ name: "T-Systems", value: "t-systems" }); + choices.push({ name: "Huggingface", value: "huggingface" }); } - const { provider } = await prompts( + const { provider } = await inquirer.prompt([ { - type: "select", + type: "list", name: "provider", message: "Which model provider would you like to use", choices: choices, - initial: 0, }, - questionHandlers, - ); + ]); modelProvider = provider; } diff --git a/helpers/providers/llmhub.ts b/helpers/providers/llmhub.ts index 531e5e431..2bcb37c12 100644 --- a/helpers/providers/llmhub.ts +++ b/helpers/providers/llmhub.ts @@ -1,9 +1,8 @@ import got from "got"; +import inquirer from "inquirer"; import ora from "ora"; import { red } from "picocolors"; -import prompts from "prompts"; import { ModelConfigParams } from "."; -import { questionHandlers } from "../../questions/utils"; export const 
TSYSTEMS_LLMHUB_API_URL = "https://llm-server.llmhub.t-systems.net/v2"; @@ -57,9 +56,9 @@ export async function askLLMHubQuestions({ }; if (!config.apiKey) { - const { key } = await prompts( + const { key } = await inquirer.prompt([ { - type: "text", + type: "input", name: "key", message: askModels ? "Please provide your LLMHub API key (or leave blank to use T_SYSTEMS_LLMHUB_API_KEY env variable):" @@ -74,34 +73,29 @@ export async function askLLMHubQuestions({ return true; }, }, - questionHandlers, - ); + ]); config.apiKey = key || process.env.T_SYSTEMS_LLMHUB_API_KEY; } if (askModels) { - const { model } = await prompts( + const { model } = await inquirer.prompt([ { - type: "select", + type: "list", name: "model", message: "Which LLM model would you like to use?", choices: await getAvailableModelChoices(false, config.apiKey), - initial: 0, }, - questionHandlers, - ); + ]); config.model = model; - const { embeddingModel } = await prompts( + const { embeddingModel } = await inquirer.prompt([ { - type: "select", + type: "list", name: "embeddingModel", message: "Which embedding model would you like to use?", choices: await getAvailableModelChoices(true, config.apiKey), - initial: 0, }, - questionHandlers, - ); + ]); config.embeddingModel = embeddingModel; config.dimensions = getDimensions(embeddingModel); } @@ -141,7 +135,7 @@ async function getAvailableModelChoices( ) .map((el: any) => { return { - title: el.id, + name: el.id, value: el.id, }; }); diff --git a/helpers/providers/mistral.ts b/helpers/providers/mistral.ts index 1b11ae544..3e3188529 100644 --- a/helpers/providers/mistral.ts +++ b/helpers/providers/mistral.ts @@ -1,6 +1,6 @@ -import prompts from "prompts"; +import inquirer from "inquirer"; import { ModelConfigParams } from "."; -import { questionHandlers, toChoice } from "../../questions/utils"; +import { toChoice } from "../../questions/utils"; const MODELS = ["mistral-tiny", "mistral-small", "mistral-medium"]; type ModelData = { @@ -40,41 +40,36 @@ 
export async function askMistralQuestions({ }; if (!config.apiKey) { - const { key } = await prompts( + const { key } = await inquirer.prompt([ { - type: "text", + type: "input", name: "key", message: "Please provide your Mistral API key (or leave blank to use MISTRAL_API_KEY env variable):", }, - questionHandlers, - ); + ]); config.apiKey = key || process.env.MISTRAL_API_KEY; } if (askModels) { - const { model } = await prompts( + const { model } = await inquirer.prompt([ { - type: "select", + type: "list", name: "model", message: "Which LLM model would you like to use?", choices: MODELS.map(toChoice), - initial: 0, }, - questionHandlers, - ); + ]); config.model = model; - const { embeddingModel } = await prompts( + const { embeddingModel } = await inquirer.prompt([ { - type: "select", + type: "list", name: "embeddingModel", message: "Which embedding model would you like to use?", choices: Object.keys(EMBEDDING_MODELS).map(toChoice), - initial: 0, }, - questionHandlers, - ); + ]); config.embeddingModel = embeddingModel; config.dimensions = EMBEDDING_MODELS[embeddingModel].dimensions; } diff --git a/helpers/providers/ollama.ts b/helpers/providers/ollama.ts index b9c797e0e..83de72487 100644 --- a/helpers/providers/ollama.ts +++ b/helpers/providers/ollama.ts @@ -1,8 +1,8 @@ +import inquirer from "inquirer"; import ollama, { type ModelResponse } from "ollama"; import { red } from "picocolors"; -import prompts from "prompts"; import { ModelConfigParams } from "."; -import { questionHandlers, toChoice } from "../../questions/utils"; +import { toChoice } from "../../questions/utils"; type ModelData = { dimensions: number; @@ -34,29 +34,25 @@ export async function askOllamaQuestions({ }; if (askModels) { - const { model } = await prompts( + const { model } = await inquirer.prompt([ { - type: "select", + type: "list", name: "model", message: "Which LLM model would you like to use?", choices: MODELS.map(toChoice), - initial: 0, }, - questionHandlers, - ); + ]); await 
ensureModel(model); config.model = model; - const { embeddingModel } = await prompts( + const { embeddingModel } = await inquirer.prompt([ { - type: "select", + type: "list", name: "embeddingModel", message: "Which embedding model would you like to use?", choices: Object.keys(EMBEDDING_MODELS).map(toChoice), - initial: 0, }, - questionHandlers, - ); + ]); await ensureModel(embeddingModel); config.embeddingModel = embeddingModel; config.dimensions = EMBEDDING_MODELS[embeddingModel].dimensions; diff --git a/helpers/providers/openai.ts b/helpers/providers/openai.ts index c26ff4c4f..05cb30aa2 100644 --- a/helpers/providers/openai.ts +++ b/helpers/providers/openai.ts @@ -1,10 +1,9 @@ import got from "got"; +import inquirer from "inquirer"; import ora from "ora"; import { red } from "picocolors"; -import prompts from "prompts"; import { ModelConfigParams, ModelConfigQuestionsParams } from "."; import { isCI } from "../../questions"; -import { questionHandlers } from "../../questions/utils"; const OPENAI_API_URL = "https://api.openai.com/v1"; @@ -32,9 +31,9 @@ export async function askOpenAIQuestions({ }; if (!config.apiKey && !isCI) { - const { key } = await prompts( + const { key } = await inquirer.prompt([ { - type: "text", + type: "input", name: "key", message: askModels ? 
"Please provide your OpenAI API key (or leave blank to use OPENAI_API_KEY env variable):" @@ -49,34 +48,29 @@ export async function askOpenAIQuestions({ return true; }, }, - questionHandlers, - ); + ]); config.apiKey = key || process.env.OPENAI_API_KEY; } if (askModels) { - const { model } = await prompts( + const { model } = await inquirer.prompt([ { - type: "select", + type: "list", name: "model", message: "Which LLM model would you like to use?", choices: await getAvailableModelChoices(false, config.apiKey), - initial: 0, }, - questionHandlers, - ); + ]); config.model = model; - const { embeddingModel } = await prompts( + const { embeddingModel } = await inquirer.prompt([ { - type: "select", + type: "list", name: "embeddingModel", message: "Which embedding model would you like to use?", choices: await getAvailableModelChoices(true, config.apiKey), - initial: 0, }, - questionHandlers, - ); + ]); config.embeddingModel = embeddingModel; config.dimensions = getDimensions(embeddingModel); } @@ -116,7 +110,7 @@ async function getAvailableModelChoices( ) .map((el: any) => { return { - title: el.id, + name: el.id, value: el.id, }; }); diff --git a/index.ts b/index.ts index 370bd1e8b..e0854ee6c 100644 --- a/index.ts +++ b/index.ts @@ -2,9 +2,9 @@ import { execSync } from "child_process"; import { Command } from "commander"; import fs from "fs"; +import inquirer from "inquirer"; import path from "path"; import { bold, cyan, green, red, yellow } from "picocolors"; -import prompts from "prompts"; import terminalLink from "terminal-link"; import checkForUpdate from "update-check"; import { createApp } from "./create-app"; @@ -18,7 +18,6 @@ import { validateNpmName } from "./helpers/validate-pkg"; import packageJson from "./package.json"; import { askQuestions } from "./questions/index"; import { QuestionArgs } from "./questions/types"; -import { onPromptState } from "./questions/utils"; // Run the initialization function initializeGlobalAgent(); @@ -156,12 +155,11 @@ const 
program = new Command(packageJson.name) Specify the tools you want to use by providing a comma-separated list. For example, 'wikipedia.WikipediaToolSpec,google.GoogleSearchToolSpec'. Use 'none' to not using any tools. `, - (tools, _) => { - if (tools === "none") { + (tools) => { + if (!tools || tools === "none") { return []; - } else { - return getTools(tools.split(",")); } + return getTools(tools.split(",")); }, ) .option( @@ -263,24 +261,22 @@ async function run(): Promise { } if (!projectPath) { - const res = await prompts({ - onState: onPromptState, - type: "text", - name: "path", - message: "What is your project named?", - initial: "my-app", - validate: (name) => { - const validation = validateNpmName(path.basename(path.resolve(name))); - if (validation.valid) { - return true; - } - return "Invalid project name: " + validation.problems![0]; + const { path: projectPathAnswer } = await inquirer.prompt([ + { + type: "input", + name: "path", + message: "What is your project named?", + default: "my-app", + validate(name) { + const validation = validateNpmName(path.basename(path.resolve(name))); + if (validation.valid) { + return true; + } + return "Invalid project name: " + validation.problems![0]; + }, }, - }); - - if (typeof res.path === "string") { - projectPath = res.path.trim(); - } + ]); + projectPath = projectPathAnswer.trim(); } if (!projectPath) { diff --git a/package.json b/package.json index 39eab8a25..eab06c909 100644 --- a/package.json +++ b/package.json @@ -43,7 +43,7 @@ "@types/cross-spawn": "6.0.0", "@types/fs-extra": "11.0.4", "@types/node": "^20.11.7", - "@types/prompts": "2.4.2", + "@types/inquirer": "^9.0.7", "@types/tar": "6.1.5", "@types/validate-npm-package-name": "3.0.0", "async-retry": "1.3.1", @@ -58,7 +58,7 @@ "ollama": "^0.5.0", "ora": "^8.0.1", "picocolors": "1.0.0", - "prompts": "2.4.2", + "inquirer": "^9.2.14", "smol-toml": "^1.1.4", "tar": "6.1.15", "terminal-link": "^3.0.0", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 
42aa06cf8..acf9e14da 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -20,12 +20,12 @@ importers: '@types/fs-extra': specifier: 11.0.4 version: 11.0.4 + '@types/inquirer': + specifier: ^9.0.7 + version: 9.0.7 '@types/node': specifier: ^20.11.7 version: 20.12.10 - '@types/prompts': - specifier: 2.4.2 - version: 2.4.2 '@types/tar': specifier: 6.1.5 version: 6.1.5 @@ -59,6 +59,9 @@ importers: got: specifier: 10.7.0 version: 10.7.0 + inquirer: + specifier: ^9.2.14 + version: 9.3.7 ollama: specifier: ^0.5.0 version: 0.5.0 @@ -68,9 +71,6 @@ importers: picocolors: specifier: 1.0.0 version: 1.0.0 - prompts: - specifier: 2.4.2 - version: 2.4.2 smol-toml: specifier: ^1.1.4 version: 1.1.4 @@ -223,6 +223,10 @@ packages: '@humanwhocodes/object-schema@2.0.3': resolution: {integrity: sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==} + '@inquirer/figures@1.0.9': + resolution: {integrity: sha512-BXvGj0ehzrngHTPTDqUoDT3NXL8U0RxUk2zJm2A66RhCEIWdtU1v6GuUqNAgArW4PQ9CinqIWyHdQgdwOj06zQ==} + engines: {node: '>=18'} + '@isaacs/cliui@8.0.2': resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} engines: {node: '>=12'} @@ -280,6 +284,9 @@ packages: '@types/http-cache-semantics@4.0.4': resolution: {integrity: sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA==} + '@types/inquirer@9.0.7': + resolution: {integrity: sha512-Q0zyBupO6NxGRZut/JdmqYKOnN95Eg5V8Csg3PGKkP+FnvsUZx1jAyK7fztIszxxMuoBA6E3KXWvdZVXIpx60g==} + '@types/jsonfile@6.1.4': resolution: {integrity: sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ==} @@ -298,9 +305,6 @@ packages: '@types/normalize-package-data@2.4.4': resolution: {integrity: sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==} - '@types/prompts@2.4.2': - resolution: {integrity: 
sha512-TwNx7qsjvRIUv/BCx583tqF5IINEVjCNqg9ofKHRlSoUHE62WBHrem4B1HGXcIrG511v29d1kJ9a/t2Esz7MIg==} - '@types/responselike@1.0.3': resolution: {integrity: sha512-H/+L+UkTV33uf49PH5pCAUBVPNj2nDBXTN+qS1dOwyyg24l3CcicicCA7ca+HMvJBZcFgl5r8e+RR6elsb4Lyw==} @@ -313,6 +317,9 @@ packages: '@types/tar@6.1.5': resolution: {integrity: sha512-qm2I/RlZij5RofuY7vohTpYNaYcrSQlN2MyjucQc7ZweDwaEWkdN/EeNh6e9zjK6uEm6PwjdMXkcj05BxZdX1Q==} + '@types/through@0.0.33': + resolution: {integrity: sha512-HsJ+z3QuETzP3cswwtzt2vEIiHBk/dCcHGhbmG5X3ecnwFD/lPrMpliGXxSCg03L9AhrdwA4Oz/qfspkDW+xGQ==} + '@types/validate-npm-package-name@3.0.0': resolution: {integrity: sha512-iFNNIrEaJH1lbPiyX+O/QyxSbKxrTjdNBVZGckt+iEL9So0hdZNBL68sOfHnt2txuUD8UJXvmKv/1DkgkebgUg==} @@ -340,6 +347,10 @@ packages: resolution: {integrity: sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==} engines: {node: '>=6'} + ansi-escapes@4.3.2: + resolution: {integrity: sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==} + engines: {node: '>=8'} + ansi-escapes@5.0.0: resolution: {integrity: sha512-5GFMVX8HqE/TB+FuBJGuO5XG0WrsA6ptUqoODaT/n9mmUaZFkqnBueB4leqGBCmrUHnCnC4PCZTCd0E7QQ83bA==} engines: {node: '>=12'} @@ -403,10 +414,16 @@ packages: balanced-match@1.0.2: resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + base64-js@1.5.1: + resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} + better-path-resolve@1.0.0: resolution: {integrity: sha512-pbnl5XzGBdrFU/wT4jqmJVPn2B6UHPBOhzMQkY/SPUPB6QtUXtmBHBIwCbXJol93mOpGMnQyP/+BB19q04xj7g==} engines: {node: '>=4'} + bl@4.1.0: + resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==} + boolean@3.2.0: resolution: {integrity: 
sha512-d0II/GO9uf9lfUHH2BQsjxzRJZBdsjgsBiW4BvhWk/3qoKwQFjIDVN19PfX8F2D/r9PCMTtLWjYVCFrpeYUzsw==} @@ -423,6 +440,9 @@ packages: breakword@1.0.6: resolution: {integrity: sha512-yjxDAYyK/pBvws9H4xKYpLDpYKEH6CzrBPAuXq3x18I+c/2MkVtT3qAr7Oloi6Dss9qNhPVueAAVU1CSeNDIXw==} + buffer@5.7.1: + resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==} + builtins@1.0.3: resolution: {integrity: sha512-uYBjakWipfaO/bXI7E8rq6kpwHRZK5cNYrUv2OzZSI/FvmdMyXJ2tG9dKcjEC5YHmHpUAwsargWIZNWdxb/bnQ==} @@ -477,6 +497,10 @@ packages: resolution: {tarball: https://codeload.github.com/watson/ci-info/tar.gz/f43f6a1cefff47fb361c88cf4b943fdbcaafe540} version: 2.0.0 + cli-cursor@3.1.0: + resolution: {integrity: sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==} + engines: {node: '>=8'} + cli-cursor@4.0.0: resolution: {integrity: sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} @@ -485,6 +509,10 @@ packages: resolution: {integrity: sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==} engines: {node: '>=6'} + cli-width@4.1.0: + resolution: {integrity: sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==} + engines: {node: '>= 12'} + cliui@6.0.0: resolution: {integrity: sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==} @@ -934,6 +962,9 @@ packages: resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==} engines: {node: '>=0.10.0'} + ieee754@1.2.1: + resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} + ignore@5.3.1: resolution: {integrity: sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==} 
engines: {node: '>= 4'} @@ -959,6 +990,10 @@ packages: ini@1.3.8: resolution: {integrity: sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==} + inquirer@9.3.7: + resolution: {integrity: sha512-LJKFHCSeIRq9hanN14IlOtPSTe3lNES7TYDTE2xxdAy1LS5rYphajK1qtwvj3YmQXvvk0U2Vbmcni8P9EIQW9w==} + engines: {node: '>=18'} + internal-slot@1.0.7: resolution: {integrity: sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==} engines: {node: '>= 0.4'} @@ -1004,6 +1039,10 @@ packages: resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} engines: {node: '>=0.10.0'} + is-interactive@1.0.0: + resolution: {integrity: sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w==} + engines: {node: '>=8'} + is-interactive@2.0.0: resolution: {integrity: sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==} engines: {node: '>=12'} @@ -1052,6 +1091,10 @@ packages: resolution: {integrity: sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==} engines: {node: '>= 0.4'} + is-unicode-supported@0.1.0: + resolution: {integrity: sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==} + engines: {node: '>=10'} + is-unicode-supported@1.3.0: resolution: {integrity: sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==} engines: {node: '>=12'} @@ -1116,10 +1159,6 @@ packages: resolution: {integrity: sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==} engines: {node: '>=0.10.0'} - kleur@3.0.3: - resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==} - engines: {node: '>=6'} - kleur@4.1.5: resolution: {integrity: 
sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==} engines: {node: '>=6'} @@ -1149,6 +1188,10 @@ packages: lodash.startcase@4.4.0: resolution: {integrity: sha512-+WKqsK294HMSc2jEbNgpHpd0JfIBhp7rEV4aqXWqFr6AlXov+SlcgB1Fv01y2kGe3Gc8nMW7VA0SrGuSkRfIEg==} + log-symbols@4.1.0: + resolution: {integrity: sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==} + engines: {node: '>=10'} + log-symbols@6.0.0: resolution: {integrity: sha512-i24m8rpwhmPIS4zscNzK6MSEhk0DUWa/8iYQWxhffV8jkI4Phvs3F+quL5xvS0gdQR0FyTCMMH33Y78dDTzzIw==} engines: {node: '>=18'} @@ -1250,6 +1293,10 @@ packages: ms@2.1.2: resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} + mute-stream@1.0.0: + resolution: {integrity: sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + natural-compare@1.4.0: resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} @@ -1285,6 +1332,10 @@ packages: resolution: {integrity: sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==} engines: {node: '>= 0.8.0'} + ora@5.4.1: + resolution: {integrity: sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ==} + engines: {node: '>=10'} + ora@8.0.1: resolution: {integrity: sha512-ANIvzobt1rls2BDny5fWZ3ZVKyD6nscLvfFRpQgfWsythlcsVUC9kL0zq6j2Z5z9wwp1kd7wpsD/T9qNPVLCaQ==} engines: {node: '>=18'} @@ -1431,10 +1482,6 @@ packages: engines: {node: '>=14'} hasBin: true - prompts@2.4.2: - resolution: {integrity: sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==} - engines: {node: '>= 6'} - pseudomap@1.0.2: resolution: {integrity: 
sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ==} @@ -1468,6 +1515,10 @@ packages: resolution: {integrity: sha512-VIMnQi/Z4HT2Fxuwg5KrY174U1VdUIASQVWXXyqtNRtxSr9IYkn1rsI6Tb6HsrHCmB7gVpNwX6JxPTHcH6IoTA==} engines: {node: '>=6'} + readable-stream@3.6.2: + resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} + engines: {node: '>= 6'} + redent@3.0.0: resolution: {integrity: sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==} engines: {node: '>=8'} @@ -1508,6 +1559,10 @@ packages: responselike@2.0.1: resolution: {integrity: sha512-4gl03wn3hj1HP3yzgdI7d3lCkF95F21Pz4BPGvKHinyQzALR5CapwC8yIi0Rh58DEMQ/SguC03wFj2k0M/mHhw==} + restore-cursor@3.1.0: + resolution: {integrity: sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==} + engines: {node: '>=8'} + restore-cursor@4.0.0: resolution: {integrity: sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} @@ -1533,9 +1588,16 @@ packages: resolution: {integrity: sha512-CHhPh+UNHD2GTXNYhPWLnU8ONHdI+5DI+4EYIAOaiD63rHeYlZvyh8P+in5999TTSFgUYuKUAjzRI4mdh/p+2A==} engines: {node: '>=8.0'} + run-async@3.0.0: + resolution: {integrity: sha512-540WwVDOMxA6dN6We19EcT9sc3hkXPw5mzRNGM3FkdN/vtE9NFvj5lFAPNwUDmJjXidm3v7TC1cTE7t17Ulm1Q==} + engines: {node: '>=0.12.0'} + run-parallel@1.2.0: resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + rxjs@7.8.1: + resolution: {integrity: sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==} + safe-array-concat@1.1.2: resolution: {integrity: sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==} engines: {node: '>=0.4'} @@ -1604,9 +1666,6 @@ packages: resolution: 
{integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} engines: {node: '>=14'} - sisteransi@1.0.5: - resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==} - slash@3.0.0: resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} engines: {node: '>=8'} @@ -1671,6 +1730,9 @@ packages: resolution: {integrity: sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==} engines: {node: '>= 0.4'} + string_decoder@1.3.0: + resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} + strip-ansi@6.0.1: resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} engines: {node: '>=8'} @@ -1742,6 +1804,9 @@ packages: resolution: {integrity: sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==} engines: {node: '>=8'} + tslib@2.8.1: + resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==} + tty-table@4.2.3: resolution: {integrity: sha512-Fs15mu0vGzCrj8fmJNP7Ynxt5J7praPXqFN0leZeZBXJwkMxv9cb2D454k1ltrtUSJbZ4yH4e0CynsHLxmUfFA==} engines: {node: '>=8.0.0'} @@ -1763,6 +1828,10 @@ packages: resolution: {integrity: sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==} engines: {node: '>=10'} + type-fest@0.21.3: + resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==} + engines: {node: '>=10'} + type-fest@0.6.0: resolution: {integrity: sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==} engines: {node: '>=8'} @@ -1816,6 +1885,9 @@ packages: uri-js@4.4.1: resolution: {integrity: 
sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} + util-deprecate@1.0.2: + resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} + validate-npm-package-license@3.0.4: resolution: {integrity: sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==} @@ -1913,6 +1985,10 @@ packages: resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} engines: {node: '>=10'} + yoctocolors-cjs@2.1.2: + resolution: {integrity: sha512-cYVsTjKl8b+FrnidjibDWskAv7UKOfcwaVZdp/it9n1s9fU3IkgDbhdIRKCW4JDsAlECJY0ytoVPT3sK6kideA==} + engines: {node: '>=18'} + snapshots: '@babel/code-frame@7.24.2': @@ -2116,6 +2192,8 @@ snapshots: '@humanwhocodes/object-schema@2.0.3': {} + '@inquirer/figures@1.0.9': {} + '@isaacs/cliui@8.0.2': dependencies: string-width: 5.1.2 @@ -2190,6 +2268,11 @@ snapshots: '@types/http-cache-semantics@4.0.4': {} + '@types/inquirer@9.0.7': + dependencies: + '@types/through': 0.0.33 + rxjs: 7.8.1 + '@types/jsonfile@6.1.4': dependencies: '@types/node': 20.12.10 @@ -2208,11 +2291,6 @@ snapshots: '@types/normalize-package-data@2.4.4': {} - '@types/prompts@2.4.2': - dependencies: - '@types/node': 20.12.10 - kleur: 3.0.3 - '@types/responselike@1.0.3': dependencies: '@types/node': 20.12.10 @@ -2226,6 +2304,10 @@ snapshots: '@types/node': 20.12.10 minipass: 4.2.8 + '@types/through@0.0.33': + dependencies: + '@types/node': 20.12.10 + '@types/validate-npm-package-name@3.0.0': {} '@ungap/structured-clone@1.2.0': {} @@ -2247,6 +2329,10 @@ snapshots: ansi-colors@4.1.3: {} + ansi-escapes@4.3.2: + dependencies: + type-fest: 0.21.3 + ansi-escapes@5.0.0: dependencies: type-fest: 1.4.0 @@ -2310,10 +2396,18 @@ snapshots: balanced-match@1.0.2: {} + base64-js@1.5.1: {} + better-path-resolve@1.0.0: dependencies: is-windows: 1.0.2 + bl@4.1.0: + dependencies: + buffer: 5.7.1 + 
inherits: 2.0.4 + readable-stream: 3.6.2 + boolean@3.2.0: {} brace-expansion@1.1.11: @@ -2333,6 +2427,11 @@ snapshots: dependencies: wcwidth: 1.0.1 + buffer@5.7.1: + dependencies: + base64-js: 1.5.1 + ieee754: 1.2.1 + builtins@1.0.3: {} cacheable-lookup@2.0.1: @@ -2389,12 +2488,18 @@ snapshots: ci-info@https://codeload.github.com/watson/ci-info/tar.gz/f43f6a1cefff47fb361c88cf4b943fdbcaafe540: {} + cli-cursor@3.1.0: + dependencies: + restore-cursor: 3.1.0 + cli-cursor@4.0.0: dependencies: restore-cursor: 4.0.0 cli-spinners@2.9.2: {} + cli-width@4.1.0: {} + cliui@6.0.0: dependencies: string-width: 4.2.3 @@ -2947,6 +3052,8 @@ snapshots: dependencies: safer-buffer: 2.1.2 + ieee754@1.2.1: {} + ignore@5.3.1: {} import-fresh@3.3.0: @@ -2967,6 +3074,21 @@ snapshots: ini@1.3.8: {} + inquirer@9.3.7: + dependencies: + '@inquirer/figures': 1.0.9 + ansi-escapes: 4.3.2 + cli-width: 4.1.0 + external-editor: 3.1.0 + mute-stream: 1.0.0 + ora: 5.4.1 + run-async: 3.0.0 + rxjs: 7.8.1 + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 6.2.0 + yoctocolors-cjs: 2.1.2 + internal-slot@1.0.7: dependencies: es-errors: 1.3.0 @@ -3011,6 +3133,8 @@ snapshots: dependencies: is-extglob: 2.1.1 + is-interactive@1.0.0: {} + is-interactive@2.0.0: {} is-negative-zero@2.0.3: {} @@ -3050,6 +3174,8 @@ snapshots: dependencies: which-typed-array: 1.1.15 + is-unicode-supported@0.1.0: {} + is-unicode-supported@1.3.0: {} is-unicode-supported@2.0.0: {} @@ -3107,8 +3233,6 @@ snapshots: kind-of@6.0.3: {} - kleur@3.0.3: {} - kleur@4.1.5: {} levn@0.4.1: @@ -3137,6 +3261,11 @@ snapshots: lodash.startcase@4.4.0: {} + log-symbols@4.1.0: + dependencies: + chalk: 4.1.2 + is-unicode-supported: 0.1.0 + log-symbols@6.0.0: dependencies: chalk: 5.3.0 @@ -3225,6 +3354,8 @@ snapshots: ms@2.1.2: {} + mute-stream@1.0.0: {} + natural-compare@1.4.0: {} normalize-package-data@2.5.0: @@ -3268,6 +3399,18 @@ snapshots: type-check: 0.4.0 word-wrap: 1.2.5 + ora@5.4.1: + dependencies: + bl: 4.1.0 + chalk: 4.1.2 + cli-cursor: 3.1.0 + 
cli-spinners: 2.9.2 + is-interactive: 1.0.0 + is-unicode-supported: 0.1.0 + log-symbols: 4.1.0 + strip-ansi: 6.0.1 + wcwidth: 1.0.1 + ora@8.0.1: dependencies: chalk: 5.3.0 @@ -3384,11 +3527,6 @@ snapshots: prettier@3.2.5: {} - prompts@2.4.2: - dependencies: - kleur: 3.0.3 - sisteransi: 1.0.5 - pseudomap@1.0.2: {} pump@3.0.0: @@ -3429,6 +3567,12 @@ snapshots: pify: 4.0.1 strip-bom: 3.0.0 + readable-stream@3.6.2: + dependencies: + inherits: 2.0.4 + string_decoder: 1.3.0 + util-deprecate: 1.0.2 + redent@3.0.0: dependencies: indent-string: 4.0.0 @@ -3470,6 +3614,11 @@ snapshots: dependencies: lowercase-keys: 2.0.0 + restore-cursor@3.1.0: + dependencies: + onetime: 5.1.2 + signal-exit: 3.0.7 + restore-cursor@4.0.0: dependencies: onetime: 5.1.2 @@ -3496,10 +3645,16 @@ snapshots: semver-compare: 1.0.0 sprintf-js: 1.1.3 + run-async@3.0.0: {} + run-parallel@1.2.0: dependencies: queue-microtask: 1.2.3 + rxjs@7.8.1: + dependencies: + tslib: 2.8.1 + safe-array-concat@1.1.2: dependencies: call-bind: 1.0.7 @@ -3568,8 +3723,6 @@ snapshots: signal-exit@4.1.0: {} - sisteransi@1.0.5: {} - slash@3.0.0: {} smartwrap@2.0.2: @@ -3649,6 +3802,10 @@ snapshots: define-properties: 1.2.1 es-object-atoms: 1.0.0 + string_decoder@1.3.0: + dependencies: + safe-buffer: 5.2.1 + strip-ansi@6.0.1: dependencies: ansi-regex: 5.0.1 @@ -3712,6 +3869,8 @@ snapshots: trim-newlines@3.0.1: {} + tslib@2.8.1: {} + tty-table@4.2.3: dependencies: chalk: 4.1.2 @@ -3732,6 +3891,8 @@ snapshots: type-fest@0.20.2: {} + type-fest@0.21.3: {} + type-fest@0.6.0: {} type-fest@0.8.1: {} @@ -3794,6 +3955,8 @@ snapshots: dependencies: punycode: 2.3.1 + util-deprecate@1.0.2: {} + validate-npm-package-license@3.0.4: dependencies: spdx-correct: 3.2.0 @@ -3912,3 +4075,5 @@ snapshots: yargs-parser: 21.1.1 yocto-queue@0.1.0: {} + + yoctocolors-cjs@2.1.2: {} diff --git a/questions/questions.ts b/questions/questions.ts index 559839975..d7e33c2c8 100644 --- a/questions/questions.ts +++ b/questions/questions.ts @@ -1,5 +1,5 @@ 
+import inquirer, { QuestionCollection } from "inquirer"; import { blue } from "picocolors"; -import prompts from "prompts"; import { isCI } from "."; import { COMMUNITY_OWNER, COMMUNITY_REPO } from "../helpers/constant"; import { EXAMPLE_FILE, EXAMPLE_GDPR } from "../helpers/datasources"; @@ -10,43 +10,36 @@ import { supportedTools, toolRequiresConfig } from "../helpers/tools"; import { getDataSourceChoices } from "./datasources"; import { getVectorDbChoices } from "./stores"; import { QuestionArgs } from "./types"; -import { - askPostInstallAction, - onPromptState, - questionHandlers, - selectLocalContextData, -} from "./utils"; +import { askPostInstallAction, selectLocalContextData } from "./utils"; export const askProQuestions = async (program: QuestionArgs) => { if (!program.template) { const styledRepo = blue( `https://github.com/${COMMUNITY_OWNER}/${COMMUNITY_REPO}`, ); - const { template } = await prompts( + const { template } = await inquirer.prompt([ { - type: "select", + type: "list", name: "template", message: "Which template would you like to use?", choices: [ - { title: "Agentic RAG (e.g. chat with docs)", value: "streaming" }, + { name: "Agentic RAG (e.g. 
chat with docs)", value: "streaming" }, { - title: "Multi-agent app (using workflows)", + name: "Multi-agent app (using workflows)", value: "multiagent", }, - { title: "Fullstack python template with Reflex", value: "reflex" }, + { name: "Fullstack python template with Reflex", value: "reflex" }, { - title: `Community template from ${styledRepo}`, + name: `Community template from ${styledRepo}`, value: "community", }, { - title: "Example using a LlamaPack", + name: "Example using a LlamaPack", value: "llamapack", }, ], - initial: 0, }, - questionHandlers, - ); + ]); program.template = template; } @@ -55,19 +48,17 @@ export const askProQuestions = async (program: QuestionArgs) => { COMMUNITY_OWNER, COMMUNITY_REPO, ); - const { communityProjectConfig } = await prompts( + const { communityProjectConfig } = await inquirer.prompt([ { - type: "select", + type: "list", name: "communityProjectConfig", message: "Select community template", choices: projectOptions.map(({ title, value }) => ({ - title, + name: title, value: JSON.stringify(value), // serialize value to string in terminal })), - initial: 0, }, - questionHandlers, - ); + ]); const projectConfig = JSON.parse(communityProjectConfig); program.communityProjectConfig = projectConfig; return; // early return - no further questions needed for community projects @@ -75,19 +66,17 @@ export const askProQuestions = async (program: QuestionArgs) => { if (program.template === "llamapack") { const availableLlamaPacks = await getAvailableLlamapackOptions(); - const { llamapack } = await prompts( + const { llamapack } = await inquirer.prompt([ { - type: "select", + type: "list", name: "llamapack", message: "Select LlamaPack", choices: availableLlamaPacks.map((pack) => ({ - title: pack.name, + name: pack.name, value: pack.folderPath, })), - initial: 0, }, - questionHandlers, - ); + ]); program.llamapack = llamapack; if (!program.postInstallAction) { program.postInstallAction = await askPostInstallAction(program); @@ -101,42 
+90,38 @@ export const askProQuestions = async (program: QuestionArgs) => { program.dataSources = [EXAMPLE_FILE]; program.framework = "fastapi"; // Ask for which Reflex use case to use - const { useCase } = await prompts( + const { useCase } = await inquirer.prompt([ { - type: "select", + type: "list", name: "useCase", message: "Which use case would you like to build?", choices: [ - { title: "Structured Extractor", value: "extractor" }, + { name: "Structured Extractor", value: "extractor" }, { - title: "Contract review (using Workflow)", + name: "Contract review (using Workflow)", value: "contract_review", }, ], - initial: 0, }, - questionHandlers, - ); + ]); program.useCase = useCase; } if (!program.framework) { const choices = [ - { title: "NextJS", value: "nextjs" }, - { title: "Express", value: "express" }, - { title: "FastAPI (Python)", value: "fastapi" }, + { name: "NextJS", value: "nextjs" }, + { name: "Express", value: "express" }, + { name: "FastAPI (Python)", value: "fastapi" }, ]; - const { framework } = await prompts( + const { framework } = await inquirer.prompt([ { - type: "select", + type: "list", name: "framework", message: "Which framework would you like to use?", choices, - initial: 0, }, - questionHandlers, - ); + ]); program.framework = framework; } @@ -147,16 +132,15 @@ export const askProQuestions = async (program: QuestionArgs) => { // if a backend-only framework is selected, ask whether we should create a frontend if (program.frontend === undefined) { const styledNextJS = blue("NextJS"); - const { frontend } = await prompts({ - onState: onPromptState, - type: "toggle", - name: "frontend", - message: `Would you like to generate a ${styledNextJS} frontend for your FastAPI backend?`, - initial: false, - active: "Yes", - inactive: "No", - }); - program.frontend = Boolean(frontend); + const { frontend } = await inquirer.prompt([ + { + type: "confirm", + name: "frontend", + message: `Would you like to generate a ${styledNextJS} frontend for your 
FastAPI backend?`, + default: false, + }, + ]); + program.frontend = frontend; } } else { program.frontend = false; @@ -169,23 +153,20 @@ export const askProQuestions = async (program: QuestionArgs) => { } if (!program.observability && program.template === "streaming") { - const { observability } = await prompts( + const { observability } = await inquirer.prompt([ { - type: "select", + type: "list", name: "observability", message: "Would you like to set up observability?", choices: [ - { title: "No", value: "none" }, + { name: "No", value: "none" }, ...(program.framework === "fastapi" - ? [{ title: "LlamaTrace", value: "llamatrace" }] + ? [{ name: "LlamaTrace", value: "llamatrace" }] : []), - { title: "Traceloop", value: "traceloop" }, + { name: "Traceloop", value: "traceloop" }, ], - initial: 0, }, - questionHandlers, - ); - + ]); program.observability = observability; } @@ -196,34 +177,32 @@ export const askProQuestions = async (program: QuestionArgs) => { const choices = program.template === "reflex" ? 
[ - { title: "Structured Extractor", value: "extractor" }, + { name: "Structured Extractor", value: "extractor" }, { - title: "Contract review (using Workflow)", + name: "Contract review (using Workflow)", value: "contract_review", }, ] : [ { - title: "Financial report (generate a financial report)", + name: "Financial report (generate a financial report)", value: "financial_report", }, { - title: "Form filling (fill missing value in a CSV file)", + name: "Form filling (fill missing value in a CSV file)", value: "form_filling", }, - { title: "Blog writer (Write a blog post)", value: "blog" }, + { name: "Blog writer (Write a blog post)", value: "blog" }, ]; - const { useCase } = await prompts( + const { useCase } = await inquirer.prompt([ { - type: "select", + type: "list", name: "useCase", message: "Which use case would you like to use?", choices, - initial: 0, }, - questionHandlers, - ); + ]); program.useCase = useCase; } @@ -245,16 +224,14 @@ export const askProQuestions = async (program: QuestionArgs) => { } if (!program.vectorDb) { - const { vectorDb } = await prompts( + const { vectorDb } = await inquirer.prompt([ { - type: "select", + type: "list", name: "vectorDb", message: "Would you like to use a vector database?", choices: getVectorDbChoices(program.framework), - initial: 0, }, - questionHandlers, - ); + ]); program.vectorDb = vectorDb; } @@ -274,18 +251,16 @@ export const askProQuestions = async (program: QuestionArgs) => { program.template, ); if (choices.length === 0) break; - const { selectedSource } = await prompts( + const { selectedSource } = await inquirer.prompt([ { - type: "select", + type: "list", name: "selectedSource", message: firstQuestion ? "Which data source would you like to use?" : "Would you like to add another data source?", choices, - initial: firstQuestion ? 
1 : 0, }, - questionHandlers, - ); + ]); if (selectedSource === "no" || selectedSource === "none") { // user doesn't want another data source or any data source @@ -310,12 +285,12 @@ export const askProQuestions = async (program: QuestionArgs) => { break; } case "web": { - const { baseUrl } = await prompts( + const { baseUrl } = await inquirer.prompt([ { - type: "text", + type: "input", name: "baseUrl", message: "Please provide base URL of the website: ", - initial: "https://www.llamaindex.ai", + default: "https://www.llamaindex.ai", validate: (value: string) => { if (!value.includes("://")) { value = `https://${value}`; @@ -330,8 +305,7 @@ export const askProQuestions = async (program: QuestionArgs) => { return true; }, }, - questionHandlers, - ); + ]); program.dataSources.push({ type: "web", @@ -344,13 +318,13 @@ export const askProQuestions = async (program: QuestionArgs) => { break; } case "db": { - const dbPrompts: prompts.PromptObject[] = [ + const dbPrompts: QuestionCollection[] = [ { - type: "text", + type: "input", name: "uri", message: "Please enter the connection string (URI) for the database.", - initial: "mysql+pymysql://user:pass@localhost:3306/mydb", + default: "mysql+pymysql://user:pass@localhost:3306/mydb", validate: (value: string) => { if (!value) { return "Please provide a valid connection string"; @@ -365,17 +339,17 @@ export const askProQuestions = async (program: QuestionArgs) => { return true; }, }, - // Only ask for a query, user can provide more complex queries in the config file later { - type: (prev) => (prev ? 
"text" : null), + type: "input", name: "queries", message: "Please enter the SQL query to fetch data:", - initial: "SELECT * FROM mytable", + default: "SELECT * FROM mytable", + when: (answers: any) => !!answers.uri, }, ]; program.dataSources.push({ type: "db", - config: await prompts(dbPrompts, questionHandlers), + config: await inquirer.prompt(dbPrompts), }); break; } @@ -394,18 +368,15 @@ export const askProQuestions = async (program: QuestionArgs) => { if (program.useLlamaParse === undefined && program.template !== "reflex") { // if already set useLlamaParse, don't ask again if (program.dataSources.some((ds) => ds.type === "file")) { - const { useLlamaParse } = await prompts( + const { useLlamaParse } = await inquirer.prompt([ { - type: "toggle", + type: "confirm", name: "useLlamaParse", message: "Would you like to use LlamaParse (improved parser for RAG - requires API key)?", - initial: false, - active: "Yes", - inactive: "No", + default: false, }, - questionHandlers, - ); + ]); program.useLlamaParse = useLlamaParse; } } @@ -416,15 +387,14 @@ export const askProQuestions = async (program: QuestionArgs) => { if (!program.llamaCloudKey && !isCI) { // if already set, don't ask again // Ask for LlamaCloud API key - const { llamaCloudKey } = await prompts( + const { llamaCloudKey } = await inquirer.prompt([ { - type: "text", + type: "input", name: "llamaCloudKey", message: "Please provide your LlamaCloud API key (leave blank to skip):", }, - questionHandlers, - ); + ]); program.llamaCloudKey = llamaCloudKey || process.env.LLAMA_CLOUD_API_KEY; } } @@ -437,16 +407,18 @@ export const askProQuestions = async (program: QuestionArgs) => { t.supportedFrameworks?.includes(program.framework), ); const toolChoices = options.map((tool) => ({ - title: `${tool.display}${toolRequiresConfig(tool) ? " (needs configuration)" : ""}`, + name: `${tool.display}${toolRequiresConfig(tool) ? 
" (needs configuration)" : ""}`, value: tool.name, })); - const { toolsName } = await prompts({ - type: "multiselect", - name: "toolsName", - message: - "Would you like to build an agent using tools? If so, select the tools here, otherwise just press enter", - choices: toolChoices, - }); + const { toolsName } = await inquirer.prompt([ + { + type: "checkbox", + name: "toolsName", + message: + "Would you like to build an agent using tools? If so, select the tools here, otherwise just press enter", + choices: toolChoices, + }, + ]); const tools = toolsName?.map((tool: string) => supportedTools.find((t) => t.name === tool), ); diff --git a/questions/simple.ts b/questions/simple.ts index 7228736b8..576c6a8d1 100644 --- a/questions/simple.ts +++ b/questions/simple.ts @@ -1,4 +1,4 @@ -import prompts from "prompts"; +import inquirer from "inquirer"; import { AI_REPORTS, EXAMPLE_10K_SEC_FILES, @@ -9,7 +9,7 @@ import { askModelConfig } from "../helpers/providers"; import { getTools } from "../helpers/tools"; import { ModelConfig, TemplateFramework } from "../helpers/types"; import { PureQuestionArgs, QuestionResults } from "./types"; -import { askPostInstallAction, questionHandlers } from "./utils"; +import { askPostInstallAction } from "./utils"; type AppType = | "rag" @@ -31,65 +31,38 @@ type SimpleAnswers = { export const askSimpleQuestions = async ( args: PureQuestionArgs, ): Promise => { - const { appType } = await prompts( + const { appType } = await inquirer.prompt([ { - type: "select", + type: "list", name: "appType", message: "What app do you want to build?", - hint: "🤖: Agent, 🔀: Workflow", + pageSize: Infinity, choices: [ + new inquirer.Separator("Agents"), + { name: " Agentic RAG", value: "rag" }, + { name: " Data Scientist", value: "data_scientist" }, + { name: " Code Artifact Agent", value: "code_artifact" }, + { name: " Information Extractor", value: "extractor" }, + new inquirer.Separator("Agentic Document Workflows"), { - title: "🤖 Agentic RAG", - value: 
"rag", - description: - "Chatbot that answers questions based on provided documents.", - }, - { - title: "🤖 Data Scientist", - value: "data_scientist", - description: - "Agent that analyzes data and generates visualizations by using a code interpreter.", - }, - { - title: "🤖 Code Artifact Agent", - value: "code_artifact", - description: - "Agent that writes code, runs it in a sandbox, and shows the output in the chat UI.", - }, - { - title: "🤖 Information Extractor", - value: "extractor", - description: - "Extracts information from documents and returns it as a structured JSON object.", - }, - { - title: "🔀 Financial Report Generator", + name: " Financial Report Generator", value: "financial_report_agent", - description: - "Generates a financial report by analyzing the provided 10-K SEC data. Uses a code interpreter to create charts or to conduct further analysis.", }, { - title: "🔀 Financial 10k SEC Form Filler", + name: " Financial 10k SEC Form Filler", value: "form_filling", - description: - "Extracts information from 10k SEC data and uses it to fill out a CSV form.", }, { - title: "🔀 Contract Reviewer", + name: " Contract Review", value: "contract_review", - description: - "Extracts and reviews contracts to ensure compliance with GDPR regulations", }, { - title: "🔀 Deep Researcher", + name: " Deep Researcher", value: "deep_research", - description: - "Researches and analyzes provided documents from multiple perspectives, generating a comprehensive report with citations to support key findings and insights.", }, ], }, - questionHandlers, - ); + ]); let language: TemplateFramework = "fastapi"; let llamaCloudKey = args.llamaCloudKey; @@ -100,46 +73,41 @@ export const askSimpleQuestions = async ( appType !== "contract_review" && appType !== "deep_research" ) { - const { language: newLanguage } = await prompts( + const { language: newLanguage } = await inquirer.prompt([ { - type: "select", + type: "list", name: "language", message: "What language do you want to 
use?", choices: [ - { title: "Python (FastAPI)", value: "fastapi" }, - { title: "Typescript (NextJS)", value: "nextjs" }, + { name: "Python (FastAPI)", value: "fastapi" }, + { name: "Typescript (NextJS)", value: "nextjs" }, ], }, - questionHandlers, - ); + ]); language = newLanguage; } - const { useLlamaCloud: newUseLlamaCloud } = await prompts( + const { useLlamaCloud: newUseLlamaCloud } = await inquirer.prompt([ { - type: "toggle", + type: "confirm", name: "useLlamaCloud", message: "Do you want to use LlamaCloud services?", - initial: false, - active: "Yes", - inactive: "No", - hint: "see https://www.llamaindex.ai/enterprise for more info", + suffix: " (see https://www.llamaindex.ai/enterprise for more info)", + default: false, }, - questionHandlers, - ); + ]); useLlamaCloud = newUseLlamaCloud; if (useLlamaCloud && !llamaCloudKey) { // Ask for LlamaCloud API key, if not set - const { llamaCloudKey: newLlamaCloudKey } = await prompts( + const { llamaCloudKey: newLlamaCloudKey } = await inquirer.prompt([ { - type: "text", + type: "input", name: "llamaCloudKey", message: "Please provide your LlamaCloud API key (leave blank to skip):", }, - questionHandlers, - ); + ]); llamaCloudKey = newLlamaCloudKey || process.env.LLAMA_CLOUD_API_KEY; } diff --git a/questions/utils.ts b/questions/utils.ts index 710cdf5db..9c416f742 100644 --- a/questions/utils.ts +++ b/questions/utils.ts @@ -1,8 +1,8 @@ import { execSync } from "child_process"; import fs from "fs"; +import inquirer from "inquirer"; import path from "path"; import { red } from "picocolors"; -import prompts from "prompts"; import { TemplateDataSourceType, TemplatePostInstallAction } from "../helpers"; import { toolsRequireConfig } from "../helpers/tools"; import { QuestionResults } from "./types"; @@ -115,7 +115,7 @@ export const onPromptState = (state: any) => { }; export const toChoice = (value: string) => { - return { title: value, value }; + return { name: value, value }; }; export const questionHandlers = { @@ 
-131,15 +131,15 @@ export async function askPostInstallAction( ): Promise { const actionChoices = [ { - title: "Just generate code (~1 sec)", + name: "Just generate code (~1 sec)", value: "none", }, { - title: "Start in VSCode (~1 sec)", + name: "Start in VSCode (~1 sec)", value: "VSCode", }, { - title: "Generate code and install dependencies (~2 min)", + name: "Generate code and install dependencies (~2 min)", value: "dependencies", }, ]; @@ -158,21 +158,18 @@ export async function askPostInstallAction( !toolsRequireConfig(args.tools) ) { actionChoices.push({ - title: "Generate code, install dependencies, and run the app (~2 min)", + name: "Generate code, install dependencies, and run the app (~2 min)", value: "runApp", }); } - const { action } = await prompts( + const { action } = await inquirer.prompt([ { - type: "select", + type: "list", name: "action", message: "How would you like to proceed?", choices: actionChoices, - initial: 1, }, - questionHandlers, - ); - + ]); return action; }