From 107adf604f860be16a526b3a46ad9f2b8fed343b Mon Sep 17 00:00:00 2001 From: Ning Lv Date: Tue, 15 Oct 2024 10:33:57 +0800 Subject: [PATCH 001/110] feat: add monitoring style --- .../src/pages/chatbot/components/Message.css | 22 +++++++++++++++++++ .../src/pages/chatbot/components/Message.tsx | 12 +++++++++- 2 files changed, 33 insertions(+), 1 deletion(-) create mode 100644 source/portal/src/pages/chatbot/components/Message.css diff --git a/source/portal/src/pages/chatbot/components/Message.css b/source/portal/src/pages/chatbot/components/Message.css new file mode 100644 index 000000000..f943489ba --- /dev/null +++ b/source/portal/src/pages/chatbot/components/Message.css @@ -0,0 +1,22 @@ +.custom-header { + font-weight: bold; + color: #333; + margin: 10px 0; +} + +.custom-table { + width: 100%; + border-collapse: collapse; + margin-top: 10px; +} + +.custom-table-header { + background-color: #f2f2f2; + padding: 8px; + border: 1px solid #ddd; +} + +.custom-table-cell { + padding: 8px; + border: 1px solid #ddd; +} \ No newline at end of file diff --git a/source/portal/src/pages/chatbot/components/Message.tsx b/source/portal/src/pages/chatbot/components/Message.tsx index aceb87162..ebef566bf 100644 --- a/source/portal/src/pages/chatbot/components/Message.tsx +++ b/source/portal/src/pages/chatbot/components/Message.tsx @@ -5,6 +5,7 @@ import ReactMarkdown from 'react-markdown'; import { BounceLoader } from 'react-spinners'; import remarkGfm from 'remark-gfm'; import BedrockImg from 'src/assets/bedrock.webp'; +import './Message.css'; interface MessageProps { type: 'ai' | 'human'; @@ -45,7 +46,16 @@ const Message: React.FC = ({ headingTagOverride="h5" headerText="Monitoring" > - +

, + h2: ({node, ...props}) =>

, + h3: ({node, ...props}) =>

, + table: ({node, ...props}) => , + th: ({node, ...props}) =>
, + td: ({node, ...props}) => , + }} + > {message.monitoring} From b03f59d899736a042a7f6155daf416a28651cd44 Mon Sep 17 00:00:00 2001 From: NingLyu Date: Tue, 15 Oct 2024 06:28:33 +0000 Subject: [PATCH 002/110] feat: format monitoring trace --- .../functions/lambda_common_tools/rag.py | 34 +++++++--- .../main_utils/online_entries/common_entry.py | 32 +++++++--- source/portal/package-lock.json | 62 +++++++++++++++++++ source/portal/package.json | 1 + .../src/pages/chatbot/components/Message.tsx | 3 +- 5 files changed, 113 insertions(+), 19 deletions(-) diff --git a/source/lambda/online/functions/lambda_common_tools/rag.py b/source/lambda/online/functions/lambda_common_tools/rag.py index 0521dc6c6..d82b5fbe0 100644 --- a/source/lambda/online/functions/lambda_common_tools/rag.py +++ b/source/lambda/online/functions/lambda_common_tools/rag.py @@ -6,14 +6,27 @@ from common_logic.common_utils.lambda_invoke_utils import send_trace -def lambda_handler(event_body,context=None): +def format_rag_data(data): + if data is None or len(data) == 0: + return "" + + md_content = "---------\n" + for item in data: + md_content += f"{item}\n" + md_content += "---------\n" + + return md_content + + +def lambda_handler(event_body, context=None): state = event_body['state'] context_list = [] # add qq match results context_list.extend(state['qq_match_results']) figure_list = [] retriever_params = state["chatbot_config"]["private_knowledge_config"] - retriever_params["query"] = state[retriever_params.get("retriever_config",{}).get("query_key","query")] + retriever_params["query"] = state[retriever_params.get( + "retriever_config", {}).get("query_key", "query")] output: str = invoke_lambda( event_body=retriever_params, lambda_name="Online_Functions", @@ -23,15 +36,17 @@ def lambda_handler(event_body,context=None): for doc in output["result"]["docs"]: context_list.append(doc["page_content"]) - figure_list = figure_list + doc.get("figure",[]) - + figure_list = figure_list + doc.get("figure", 
[]) + # Remove duplicate figures unique_set = {tuple(d.items()) for d in figure_list} unique_figure_list = [dict(t) for t in unique_set] state['extra_response']['figures'] = unique_figure_list - - send_trace(f"\n\n**rag-contexts:** {context_list}", enable_trace=state["enable_trace"]) - + + context_md = format_rag_data(context_list) + send_trace( + f"\n\n**RAG contexts:**\n\n{context_md}\n\n", enable_trace=state["enable_trace"]) + group_name = state['chatbot_config']['group_name'] llm_config = state["chatbot_config"]["private_knowledge_config"]['llm_config'] chatbot_id = state["chatbot_config"]["chatbot_id"] @@ -61,7 +76,6 @@ def lambda_handler(event_body,context=None): }, }, ) - # - - return {"code":0,"result":output} + # + return {"code": 0, "result": output} diff --git a/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py b/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py index 8a59c4379..d06cde0a5 100644 --- a/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py +++ b/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py @@ -111,18 +111,30 @@ def is_null_or_empty(value): return False +def format_preprocess_output(ori_query, rewrite_query): + if is_null_or_empty(ori_query) or is_null_or_empty(rewrite_query): + return "" + + markdown_table = "| Original Query | Rewritten Query |\n" + markdown_table += "|-------|-------|\n" + markdown_table += f"| {ori_query} | {rewrite_query} |\n" + + return markdown_table + + def format_intention_output(data): if is_null_or_empty(data): return "" - markdown_table = "| Query | Score | Name | Intent | Additional Info |\n" - markdown_table += "|----------------------|-------|------------|-------------|----------------------|\n" + markdown_table = "| Query | Score | Name | Intent | Additional Info |\n" + markdown_table += "|-------|-------|-------|-------|-------|\n" for item in data: query = item.get("query", "") score = item.get("score", "") 
name = item.get("name", "") intent = item.get("intent", "") - kwargs = ', '.join([f'{k}: {v}' for k, v in item.get('kwargs', {}).items()]) + kwargs = ', '.join( + [f'{k}: {v}' for k, v in item.get('kwargs', {}).items()]) markdown_table += f"| {query} | {score} | {name} | {intent} | {kwargs} |\n" logger.info(markdown_table) @@ -142,7 +154,8 @@ def query_preprocess(state: ChatbotState): handler_name="lambda_handler", ) - send_trace(f"\n**query rewrite:** {output}\n**origin query:** {state['query']}") + preprocess_md = format_preprocess_output(state["query"], output) + send_trace(f"{preprocess_md}") return {"query_rewrite": output} @@ -198,7 +211,7 @@ def intention_detection(state: ChatbotState): markdown_table = format_intention_output(intent_fewshot_examples) send_trace( - f"**intention retrieved:**\n\n {markdown_table}", + f"{markdown_table}", state["stream"], state["ws_connection_id"], state["enable_trace"], @@ -253,7 +266,8 @@ def agent(state: ChatbotState): or state["chatbot_config"]["agent_config"]["only_use_rag_tool"] ): if state["chatbot_config"]["agent_config"]["only_use_rag_tool"]: - send_trace("agent only use rag tool", enable_trace=state["enable_trace"]) + send_trace("agent only use rag tool", + enable_trace=state["enable_trace"]) elif no_intention_condition: send_trace( "no_intention_condition, switch to rag tool", @@ -361,7 +375,8 @@ def build_graph(chatbot_state_cls): # add node for all chat/rag/agent mode workflow.add_node("query_preprocess", query_preprocess) # chat mode - workflow.add_node("llm_direct_results_generation", llm_direct_results_generation) + workflow.add_node("llm_direct_results_generation", + llm_direct_results_generation) # rag mode # workflow.add_node("knowledge_retrieve", knowledge_retrieve) # workflow.add_node("llm_rag_results_generation", llm_rag_results_generation) @@ -376,7 +391,8 @@ def build_graph(chatbot_state_cls): # add all edges workflow.set_entry_point("query_preprocess") # chat mode - 
workflow.add_edge("llm_direct_results_generation", "final_results_preparation") + workflow.add_edge("llm_direct_results_generation", + "final_results_preparation") # rag mode # workflow.add_edge("knowledge_retrieve", "llm_rag_results_generation") # workflow.add_edge("llm_rag_results_generation", END) diff --git a/source/portal/package-lock.json b/source/portal/package-lock.json index 8ab64c44b..24bd18609 100644 --- a/source/portal/package-lock.json +++ b/source/portal/package-lock.json @@ -27,6 +27,7 @@ "react-spinners": "^0.13.8", "react-use-websocket": "^4.8.1", "remark-gfm": "^4.0.0", + "remark-html": "16.0.1", "sass": "^1.74.1", "uuid": "^9.0.1" }, @@ -3941,6 +3942,42 @@ "node": ">= 0.4" } }, + "node_modules/hast-util-sanitize": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/hast-util-sanitize/-/hast-util-sanitize-5.0.1.tgz", + "integrity": "sha512-IGrgWLuip4O2nq5CugXy4GI2V8kx4sFVy5Hd4vF7AR2gxS0N9s7nEAVUyeMtZKZvzrxVsHt73XdTsno1tClIkQ==", + "dependencies": { + "@types/hast": "^3.0.0", + "@ungap/structured-clone": "^1.2.0", + "unist-util-position": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-to-html": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-9.0.3.tgz", + "integrity": "sha512-M17uBDzMJ9RPCqLMO92gNNUDuBSq10a25SDBI08iCCxmorf4Yy6sYHK57n9WAbRAAaU+DuR4W6GN9K4DFZesYg==", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "ccount": "^2.0.0", + "comma-separated-tokens": "^2.0.0", + "hast-util-whitespace": "^3.0.0", + "html-void-elements": "^3.0.0", + "mdast-util-to-hast": "^13.0.0", + "property-information": "^6.0.0", + "space-separated-tokens": "^2.0.0", + "stringify-entities": "^4.0.0", + "zwitch": "^2.0.4" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/hast-util-to-jsx-runtime": { "version": "2.3.0", 
"resolved": "https://registry.npmjs.org/hast-util-to-jsx-runtime/-/hast-util-to-jsx-runtime-2.3.0.tgz", @@ -3996,6 +4033,15 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/html-void-elements": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-3.0.0.tgz", + "integrity": "sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/i18next": { "version": "23.11.3", "resolved": "https://registry.npmjs.org/i18next/-/i18next-23.11.3.tgz", @@ -6343,6 +6389,22 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/remark-html": { + "version": "16.0.1", + "resolved": "https://registry.npmjs.org/remark-html/-/remark-html-16.0.1.tgz", + "integrity": "sha512-B9JqA5i0qZe0Nsf49q3OXyGvyXuZFDzAP2iOFLEumymuYJITVpiH1IgsTEwTpdptDmZlMDMWeDmSawdaJIGCXQ==", + "dependencies": { + "@types/mdast": "^4.0.0", + "hast-util-sanitize": "^5.0.0", + "hast-util-to-html": "^9.0.0", + "mdast-util-to-hast": "^13.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/remark-parse": { "version": "11.0.0", "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-11.0.0.tgz", diff --git a/source/portal/package.json b/source/portal/package.json index 48414725d..b4f40f3a4 100644 --- a/source/portal/package.json +++ b/source/portal/package.json @@ -29,6 +29,7 @@ "react-spinners": "^0.13.8", "react-use-websocket": "^4.8.1", "remark-gfm": "^4.0.0", + "remark-html": "16.0.1", "sass": "^1.74.1", "uuid": "^9.0.1" }, diff --git a/source/portal/src/pages/chatbot/components/Message.tsx b/source/portal/src/pages/chatbot/components/Message.tsx index ebef566bf..35c0f4538 100644 --- a/source/portal/src/pages/chatbot/components/Message.tsx +++ 
b/source/portal/src/pages/chatbot/components/Message.tsx @@ -4,6 +4,7 @@ import Avatar from 'react-avatar'; import ReactMarkdown from 'react-markdown'; import { BounceLoader } from 'react-spinners'; import remarkGfm from 'remark-gfm'; +import remarkHtml from 'remark-html'; import BedrockImg from 'src/assets/bedrock.webp'; import './Message.css'; @@ -46,7 +47,7 @@ const Message: React.FC = ({ headingTagOverride="h5" headerText="Monitoring" > -

, h2: ({node, ...props}) =>

, From 5b8612fa6d297b10ccdc6de72c90d939d59b1e36 Mon Sep 17 00:00:00 2001 From: Ning Lv Date: Tue, 15 Oct 2024 14:31:34 +0800 Subject: [PATCH 003/110] chore: update text --- .../common_logic/common_utils/lambda_invoke_utils.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/source/lambda/online/common_logic/common_utils/lambda_invoke_utils.py b/source/lambda/online/common_logic/common_utils/lambda_invoke_utils.py index f87ef838e..5188480da 100644 --- a/source/lambda/online/common_logic/common_utils/lambda_invoke_utils.py +++ b/source/lambda/online/common_logic/common_utils/lambda_invoke_utils.py @@ -17,12 +17,12 @@ __FUNC_NAME_MAP = { - "query_preprocess": "Preprocess for multi-round conversation", - "intention_detection": "Intention detection", + "query_preprocess": "Preprocess for Multi-round Conversation", + "intention_detection": "Intention Detection", "agent": "Agent", - "tools_choose_and_results_generation": "Tool calling", - "results_evaluation": "Result evaluation", - "tool_execution": "Final tool result" + "tools_choose_and_results_generation": "Tool Calling", + "results_evaluation": "Result Evaluation", + "tool_execution": "Final Tool Result" } class LAMBDA_INVOKE_MODE(enum.Enum): From 4584c70672cc7b97ec787b336a7d871bcd835177 Mon Sep 17 00:00:00 2001 From: NingLyu Date: Tue, 15 Oct 2024 07:20:41 +0000 Subject: [PATCH 004/110] chore: update --- .../online/functions/lambda_common_tools/rag.py | 11 ++++++----- .../main_utils/online_entries/agent_base.py | 2 +- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/source/lambda/online/functions/lambda_common_tools/rag.py b/source/lambda/online/functions/lambda_common_tools/rag.py index d82b5fbe0..05791454e 100644 --- a/source/lambda/online/functions/lambda_common_tools/rag.py +++ b/source/lambda/online/functions/lambda_common_tools/rag.py @@ -10,12 +10,13 @@ def format_rag_data(data): if data is None or len(data) == 0: return "" - md_content = "---------\n" + markdown_table 
= "| RAG Context |\n" + markdown_table += "|-----|\n" for item in data: - md_content += f"{item}\n" - md_content += "---------\n" + item = item.replace("\n", "
") + markdown_table += f"| {item} |\n" - return md_content + return markdown_table def lambda_handler(event_body, context=None): @@ -45,7 +46,7 @@ def lambda_handler(event_body, context=None): context_md = format_rag_data(context_list) send_trace( - f"\n\n**RAG contexts:**\n\n{context_md}\n\n", enable_trace=state["enable_trace"]) + f"\n\n{context_md}\n\n", enable_trace=state["enable_trace"]) group_name = state['chatbot_config']['group_name'] llm_config = state["chatbot_config"]["private_knowledge_config"]['llm_config'] diff --git a/source/lambda/online/lambda_main/main_utils/online_entries/agent_base.py b/source/lambda/online/lambda_main/main_utils/online_entries/agent_base.py index b61a1c988..7b76587ad 100644 --- a/source/lambda/online/lambda_main/main_utils/online_entries/agent_base.py +++ b/source/lambda/online/lambda_main/main_utils/online_entries/agent_base.py @@ -30,7 +30,7 @@ def tools_choose_and_results_generation(state): agent_repeated_call_validation = state['agent_current_call_number'] < state['agent_repeated_call_limit'] send_trace( - f"\n\n**agent_current_output:** \n{json.dumps(agent_current_output['agent_output'],ensure_ascii=False,indent=2)}\n\n **agent_current_call_number:** {agent_current_call_number}", + f"\n\n**agent_current_output:** \n\n{json.dumps(agent_current_output['agent_output'],ensure_ascii=False,indent=2)}\n\n **agent_current_call_number:** {agent_current_call_number}", state["stream"], state["ws_connection_id"] ) From 54bcf8a147a122279ff37c6840cfc4a1de5df38e Mon Sep 17 00:00:00 2001 From: Xu Han Date: Wed, 16 Oct 2024 08:28:34 +0000 Subject: [PATCH 005/110] chore: update magic config --- source/infrastructure/cli/magic-config.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/source/infrastructure/cli/magic-config.ts b/source/infrastructure/cli/magic-config.ts index cc0205f8c..d994e768c 100644 --- a/source/infrastructure/cli/magic-config.ts +++ b/source/infrastructure/cli/magic-config.ts @@ -66,7 +66,7 @@ async 
function getAwsAccountAndRegion() { AWS_REGION = new AWS.IniLoader().loadFrom({ isConfig: true }).default.region; } catch (error) { - console.error("No default region found in the AWS credentials file. Please enter the region you want to deploy the intelli-agent knowledge base"); + console.error("No default region found in the AWS credentials file. Please enter the region you want to deploy the intelli-agent solution"); AWS_REGION = undefined; } @@ -172,7 +172,7 @@ async function processCreateOptions(options: any): Promise { { type: "input", name: "intelliAgentDeployRegion", - message: "Please enter the region you want to deploy the intelli-agent knowledge base", + message: "Please enter the region you want to deploy the intelli-agent solution", initial: options.intelliAgentDeployRegion ?? AWS_REGION, validate(intelliAgentDeployRegion: string) { if (Object.values(supportedRegions).includes(intelliAgentDeployRegion)) { From 1d5065f5549f022c81bc2e31bde834c42df7adfb Mon Sep 17 00:00:00 2001 From: Xu Han Date: Thu, 17 Oct 2024 02:55:50 +0000 Subject: [PATCH 006/110] fix: fix intention bug --- source/lambda/etl/get_status.py | 2 +- source/lambda/intention/intention.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/source/lambda/etl/get_status.py b/source/lambda/etl/get_status.py index 9f189d76e..8682a1e34 100644 --- a/source/lambda/etl/get_status.py +++ b/source/lambda/etl/get_status.py @@ -6,7 +6,7 @@ logger = logging.getLogger() logger.setLevel(logging.INFO) -state_machine_arn = os.environ["sfn_arn"] +state_machine_arn = os.environ["SFN_ARN"] def lambda_handler(event, context): diff --git a/source/lambda/intention/intention.py b/source/lambda/intention/intention.py index 694731ef9..fa50ab1a4 100644 --- a/source/lambda/intention/intention.py +++ b/source/lambda/intention/intention.py @@ -68,8 +68,8 @@ response = opensearch_client.describe_domain( DomainName=aos_domain_name) aos_endpoint = response["DomainStatus"]["Endpoint"] - aos_client = 
LLMBotOpenSearchClient( - aos_endpoint, (username, password)).client + aos_client = LLMBotOpenSearchClient( + aos_endpoint, (username, password)).client except sm_client.exceptions.ResourceNotFoundException: logger.info("Secret '%s' not found in Secrets Manager", aos_secret) except Exception as err: From f9b37a2833d77cbe389c6f8ee12f4def35c8cc9a Mon Sep 17 00:00:00 2001 From: Xu Han Date: Thu, 17 Oct 2024 03:30:11 +0000 Subject: [PATCH 007/110] chore: linting and support bedrock embedding --- source/lambda/intention/aos/sm_utils.py | 176 ++++++++--------- source/lambda/intention/intention.py | 247 ++++++++---------------- 2 files changed, 163 insertions(+), 260 deletions(-) diff --git a/source/lambda/intention/aos/sm_utils.py b/source/lambda/intention/aos/sm_utils.py index ec27e332b..794f61616 100644 --- a/source/lambda/intention/aos/sm_utils.py +++ b/source/lambda/intention/aos/sm_utils.py @@ -1,21 +1,19 @@ -import json import io +import json +import logging from typing import Any, Dict, Iterator, List, Mapping, Optional -from langchain.llms.sagemaker_endpoint import LLMContentHandler, SagemakerEndpoint -from langchain.embeddings import SagemakerEndpointEmbeddings -from langchain.embeddings.sagemaker_endpoint import EmbeddingsContentHandler + +import boto3 from langchain.callbacks.manager import CallbackManagerForLLMRun +from langchain.embeddings import BedrockEmbeddings, SagemakerEndpointEmbeddings +from langchain.embeddings.sagemaker_endpoint import EmbeddingsContentHandler +from langchain.llms.sagemaker_endpoint import LLMContentHandler, SagemakerEndpoint from langchain.llms.utils import enforce_stop_tokens -from typing import Dict, List, Optional, Any, Iterator +from langchain_core.language_models.chat_models import BaseChatModel +from langchain_core.messages import BaseMessage from langchain_core.outputs import GenerationChunk -import boto3 from langchain_core.pydantic_v1 import Extra, root_validator -from langchain_core.language_models.chat_models import 
BaseChatModel -from langchain_core.messages import ( - BaseMessage, -) -import logging logger = logging.getLogger() logger.setLevel(logging.INFO) @@ -43,7 +41,7 @@ def transform_input(self, inputs: List[str], model_kwargs: Dict) -> bytes: def transform_output(self, output: bytes) -> List[List[float]]: response_json = json.loads(output.read().decode("utf-8")) - return response_json["sentence_embeddings"]['dense_vecs'] + return response_json["sentence_embeddings"]["dense_vecs"] class crossContentHandler(LLMContentHandler): @@ -51,13 +49,12 @@ class crossContentHandler(LLMContentHandler): accepts = "application/json" def transform_input(self, prompt: str, model_kwargs: Dict) -> bytes: - input_str = json.dumps( - {"inputs": prompt, "docs": model_kwargs["context"]}) - return input_str.encode('utf-8') + input_str = json.dumps({"inputs": prompt, "docs": model_kwargs["context"]}) + return input_str.encode("utf-8") def transform_output(self, output: bytes) -> str: response_json = json.loads(output.read().decode("utf-8")) - return response_json['scores'][0][1] + return response_json["scores"][0][1] class rerankContentHandler(LLMContentHandler): @@ -66,11 +63,11 @@ class rerankContentHandler(LLMContentHandler): def transform_input(self, rerank_pairs: str, model_kwargs: Dict) -> bytes: input_str = json.dumps({"inputs": json.loads(rerank_pairs)}) - return input_str.encode('utf-8') + return input_str.encode("utf-8") def transform_output(self, output: bytes) -> str: response_json = json.loads(output.read().decode("utf-8")) - return json.dumps(response_json['rerank_scores']) + return json.dumps(response_json["rerank_scores"]) class answerContentHandler(LLMContentHandler): @@ -79,29 +76,28 @@ class answerContentHandler(LLMContentHandler): def transform_input(self, question: str, model_kwargs: Dict) -> bytes: - template_1 = '以下context xml tag内的文本内容为背景知识:\n\n{context}\n\n请根据背景知识, 回答这个问题:{question}' + template_1 = "以下context xml tag内的文本内容为背景知识:\n\n{context}\n\n请根据背景知识, 
回答这个问题:{question}" context = model_kwargs["context"] if len(context) == 0: prompt = question else: - prompt = template_1.format( - context=model_kwargs["context"], question=question) + prompt = template_1.format(context=model_kwargs["context"], question=question) - input_str = json.dumps({"inputs": prompt, - "history": model_kwargs["history"], - "parameters": model_kwargs["parameters"]}) - return input_str.encode('utf-8') + input_str = json.dumps( + {"inputs": prompt, "history": model_kwargs["history"], "parameters": model_kwargs["parameters"]} + ) + return input_str.encode("utf-8") def transform_output(self, output: bytes) -> str: response_json = json.loads(output.read().decode("utf-8")) - return response_json['outputs'] + return response_json["outputs"] class LineIterator: """ - A helper class for parsing the byte stream input. - + A helper class for parsing the byte stream input. + The output of the model will be in the following format: ``` b'{"outputs": [" a"]}\n' @@ -109,19 +105,19 @@ class LineIterator: b'{"outputs": [" problem"]}\n' ... ``` - - While usually each PayloadPart event from the event stream will contain a byte array + + While usually each PayloadPart event from the event stream will contain a byte array with a full json, this is not guaranteed and some of the json objects may be split across PayloadPart events. For example: ``` {'PayloadPart': {'Bytes': b'{"outputs": '}} {'PayloadPart': {'Bytes': b'[" problem"]}\n'}} ``` - + This class accounts for this by concatenating bytes written via the 'write' function and then exposing a method which will return lines (ending with a '\n' character) within - the buffer via the 'scan_lines' function. It maintains the position of the last read - position to ensure that previous bytes are not exposed again. + the buffer via the 'scan_lines' function. It maintains the position of the last read + position to ensure that previous bytes are not exposed again. 
""" def __init__(self, stream): @@ -136,7 +132,7 @@ def __next__(self): while True: self.buffer.seek(self.read_pos) line = self.buffer.readline() - if line and line[-1] == ord('\n'): + if line and line[-1] == ord("\n"): self.read_pos += len(line) return line[:-1] try: @@ -145,29 +141,28 @@ def __next__(self): if self.read_pos < self.buffer.getbuffer().nbytes: continue raise - if 'PayloadPart' not in chunk: - print('Unknown event type:' + chunk) + if "PayloadPart" not in chunk: + print("Unknown event type:" + chunk) continue self.buffer.seek(0, io.SEEK_END) - self.buffer.write(chunk['PayloadPart']['Bytes']) + self.buffer.write(chunk["PayloadPart"]["Bytes"]) class SagemakerEndpointWithStreaming(SagemakerEndpoint): chat_history: List[Dict] = None def _stream( - self, - prompt: str, - stop: Optional[List[str]] = None, - run_manager: Optional[CallbackManagerForLLMRun] = None, - **kwargs: Any, + self, + prompt: str, + stop: Optional[List[str]] = None, + run_manager: Optional[CallbackManagerForLLMRun] = None, + **kwargs: Any, ) -> Iterator[GenerationChunk]: _model_kwargs = self.model_kwargs or {} _model_kwargs = {**_model_kwargs, **kwargs} _endpoint_kwargs = self.endpoint_kwargs or {} - body = self.content_handler.transform_input( - prompt, self.chat_history, _model_kwargs) + body = self.content_handler.transform_input(prompt, self.chat_history, _model_kwargs) # content_type = self.content_handler.content_type # accepts = self.content_handler.accepts resp = self.client.invoke_endpoint_with_response_stream( @@ -263,16 +258,12 @@ def validate_environment(cls, values: Dict) -> Dict: try: if values["credentials_profile_name"] is not None: - session = boto3.Session( - profile_name=values["credentials_profile_name"] - ) + session = boto3.Session(profile_name=values["credentials_profile_name"]) else: # use default credentials session = boto3.Session() - values["client"] = session.client( - "sagemaker-runtime", region_name=values["region_name"] - ) + values["client"] = 
session.client("sagemaker-runtime", region_name=values["region_name"]) except Exception as e: raise ValueError( @@ -282,10 +273,7 @@ def validate_environment(cls, values: Dict) -> Dict: ) from e except ImportError: - raise ImportError( - "Could not import boto3 python package. " - "Please install it with `pip install boto3`." - ) + raise ImportError("Could not import boto3 python package. " "Please install it with `pip install boto3`.") return values @property @@ -303,21 +291,18 @@ def _llm_type(self) -> str: return "sagemaker_endpoint" def _stream( - self, - messages: List[BaseMessage], - stop: Optional[List[str]] = None, - run_manager: Optional[CallbackManagerForLLMRun] = None, - **kwargs: Any, + self, + messages: List[BaseMessage], + stop: Optional[List[str]] = None, + run_manager: Optional[CallbackManagerForLLMRun] = None, + **kwargs: Any, ) -> Iterator[GenerationChunk]: _model_kwargs = self.model_kwargs or {} _model_kwargs = {**_model_kwargs, **kwargs} _endpoint_kwargs = self.endpoint_kwargs or {} # body = self.content_handler.transform_input(prompt, self.chat_history, _model_kwargs) - body = json.dumps({ - "messages": messages, - "parameters": {**_model_kwargs} - }) + body = json.dumps({"messages": messages, "parameters": {**_model_kwargs}}) # print(body) # # print(sdg) # content_type = self.content_handler.content_type @@ -339,21 +324,19 @@ def _stream( # run_manager.on_llm_new_token(resp_output) yield resp_output - def _generate(self, - messages: List[BaseMessage], - stop: Optional[List[str]] = None, - run_manager: Optional[CallbackManagerForLLMRun] = None, - **kwargs: Any, - ) -> str: + def _generate( + self, + messages: List[BaseMessage], + stop: Optional[List[str]] = None, + run_manager: Optional[CallbackManagerForLLMRun] = None, + **kwargs: Any, + ) -> str: _model_kwargs = self.model_kwargs or {} _model_kwargs = {**_model_kwargs, **kwargs} _endpoint_kwargs = self.endpoint_kwargs or {} # body = self.content_handler.transform_input(prompt, 
self.chat_history, _model_kwargs) - body = json.dumps({ - "messages": messages, - "parameters": {**_model_kwargs} - }) + body = json.dumps({"messages": messages, "parameters": {**_model_kwargs}}) try: response = self.client.invoke_endpoint( EndpointName=self.endpoint_name, @@ -375,7 +358,9 @@ def _generate(self, return text -def SagemakerEndpointVectorOrCross(prompt: str, endpoint_name: str, region_name: str, model_type: str, stop: List[str], target_model=None, **kwargs) -> SagemakerEndpoint: +def SagemakerEndpointVectorOrCross( + prompt: str, endpoint_name: str, region_name: str, model_type: str, stop: List[str], target_model=None, **kwargs +) -> SagemakerEndpoint: """ original class invocation: response = self.client.invoke_endpoint( @@ -390,17 +375,11 @@ def SagemakerEndpointVectorOrCross(prompt: str, endpoint_name: str, region_name: endpoint_kwargs = {"TargetModel": target_model} else: endpoint_kwargs = None - client = boto3.client( - "sagemaker-runtime", - region_name=region_name - ) + client = boto3.client("sagemaker-runtime", region_name=region_name) if model_type == "vector" or model_type == "bce": content_handler = vectorContentHandler() embeddings = SagemakerEndpointEmbeddings( - client=client, - endpoint_name=endpoint_name, - content_handler=content_handler, - endpoint_kwargs=endpoint_kwargs + client=client, endpoint_name=endpoint_name, content_handler=content_handler, endpoint_kwargs=endpoint_kwargs ) query_result = embeddings.embed_query(prompt) return query_result @@ -409,15 +388,15 @@ def SagemakerEndpointVectorOrCross(prompt: str, endpoint_name: str, region_name: elif model_type == "m3": content_handler = m3ContentHandler() model_kwargs = {} - model_kwargs['batch_size'] = 12 - model_kwargs['max_length'] = 512 - model_kwargs['return_type'] = 'dense' + model_kwargs["batch_size"] = 12 + model_kwargs["max_length"] = 512 + model_kwargs["return_type"] = "dense" embeddings = SagemakerEndpointEmbeddings( client=client, endpoint_name=endpoint_name, 
content_handler=content_handler, model_kwargs=model_kwargs, - endpoint_kwargs=endpoint_kwargs + endpoint_kwargs=endpoint_kwargs, ) query_result = embeddings.embed_query(prompt) return query_result @@ -431,32 +410,37 @@ def SagemakerEndpointVectorOrCross(prompt: str, endpoint_name: str, region_name: endpoint_name=endpoint_name, # region_name = region_name, content_handler=content_handler, - endpoint_kwargs=endpoint_kwargs + endpoint_kwargs=endpoint_kwargs, ) return genericModel(prompt=prompt, stop=stop, **kwargs) def getCustomEmbeddings(endpoint_name: str, region_name: str, model_type: str) -> SagemakerEndpointEmbeddings: - client = boto3.client( - "sagemaker-runtime", - region_name=region_name - ) + client = boto3.client("sagemaker-runtime", region_name=region_name) + bedrock_client = boto3.client("bedrock-runtime") embeddings = None - if model_type == "bce": + if model_type == "bedrock": + content_handler = BedrockEmbeddings() + embeddings = BedrockEmbeddings( + client=bedrock_client, + region_name=region_name, + model_id=endpoint_name, + ) + elif model_type == "bce": content_handler = vectorContentHandler() embeddings = SagemakerEndpointEmbeddings( client=client, endpoint_name=endpoint_name, content_handler=content_handler, - endpoint_kwargs={"TargetModel": "bce_embedding_model.tar.gz"} + endpoint_kwargs={"TargetModel": "bce_embedding_model.tar.gz"}, ) # compatible with both m3 and bce. 
else: content_handler = m3ContentHandler() model_kwargs = {} - model_kwargs['batch_size'] = 12 - model_kwargs['max_length'] = 512 - model_kwargs['return_type'] = 'dense' + model_kwargs["batch_size"] = 12 + model_kwargs["max_length"] = 512 + model_kwargs["return_type"] = "dense" embeddings = SagemakerEndpointEmbeddings( client=client, endpoint_name=endpoint_name, diff --git a/source/lambda/intention/intention.py b/source/lambda/intention/intention.py index fa50ab1a4..e3871b5a1 100644 --- a/source/lambda/intention/intention.py +++ b/source/lambda/intention/intention.py @@ -1,36 +1,38 @@ import hashlib import json +import logging import os import re import time +from io import BytesIO from typing import List + import boto3 -from openpyxl import load_workbook -from io import BytesIO +from aos import sm_utils +from aos.aos_utils import LLMBotOpenSearchClient from botocore.paginate import TokenEncoder -from opensearchpy import NotFoundError, RequestError, helpers, RequestsHttpConnection -import logging -from langchain.embeddings.bedrock import BedrockEmbeddings +from constant import ( + BULK_SIZE, + DEFAULT_CONTENT_TYPE, + DEFAULT_MAX_ITEMS, + DEFAULT_SIZE, + DOWNLOAD_RESOURCE, + EXECUTION_RESOURCE, + INDEX_USED_SCAN_RESOURCE, + PRESIGNED_URL_RESOURCE, + SECRET_NAME, + ModelDimensionMap, +) from langchain.docstore.document import Document +from langchain.embeddings.bedrock import BedrockEmbeddings from langchain_community.vectorstores import OpenSearchVectorSearch from langchain_community.vectorstores.opensearch_vector_search import ( OpenSearchVectorSearch, ) -from aos import sm_utils +from openpyxl import load_workbook +from opensearchpy import NotFoundError, RequestError, RequestsHttpConnection, helpers from requests_aws4auth import AWS4Auth -from aos.aos_utils import LLMBotOpenSearchClient -from constant import (BULK_SIZE, - DEFAULT_CONTENT_TYPE, - DEFAULT_MAX_ITEMS, - DEFAULT_SIZE, - DOWNLOAD_RESOURCE, - EXECUTION_RESOURCE, - INDEX_USED_SCAN_RESOURCE, - 
PRESIGNED_URL_RESOURCE, - SECRET_NAME, - ModelDimensionMap) - logger = logging.getLogger(__name__) encoder = TokenEncoder() @@ -55,21 +57,17 @@ sm_client = boto3.client("secretsmanager") try: - master_user = sm_client.get_secret_value( - SecretId=aos_secret)["SecretString"] - secret_body = sm_client.get_secret_value( - SecretId=SECRET_NAME)['SecretString'] + master_user = sm_client.get_secret_value(SecretId=aos_secret)["SecretString"] + secret_body = sm_client.get_secret_value(SecretId=SECRET_NAME)["SecretString"] secret = json.loads(secret_body) username = secret.get("username") password = secret.get("password") if not aos_endpoint: opensearch_client = boto3.client("opensearch") - response = opensearch_client.describe_domain( - DomainName=aos_domain_name) + response = opensearch_client.describe_domain(DomainName=aos_domain_name) aos_endpoint = response["DomainStatus"]["Endpoint"] - aos_client = LLMBotOpenSearchClient( - aos_endpoint, (username, password)).client + aos_client = LLMBotOpenSearchClient(aos_endpoint, (username, password)).client except sm_client.exceptions.ResourceNotFoundException: logger.info("Secret '%s' not found in Secrets Manager", aos_secret) except Exception as err: @@ -81,8 +79,7 @@ bedrock_client = boto3.client("bedrock-runtime", region_name=bedrock_region) credentials = boto3.Session().get_credentials() -awsauth = AWS4Auth(refreshable_credentials=credentials, - region=region, service="es") +awsauth = AWS4Auth(refreshable_credentials=credentials, region=region, service="es") resp_header = { "Content-Type": "application/json", @@ -104,31 +101,23 @@ def __init__( def aos_ingestion(self, documents: List[Document]) -> None: texts = [doc.page_content for doc in documents] metadatas = [doc.metadata for doc in documents] - embeddings_vectors = self.docsearch.embedding_function.embed_documents( - list(texts) - ) + embeddings_vectors = self.docsearch.embedding_function.embed_documents(list(texts)) if isinstance(embeddings_vectors[0], dict): 
embeddings_vectors_list = [] metadata_list = [] for doc_id, metadata in enumerate(metadatas): - embeddings_vectors_list.append( - embeddings_vectors[0]["dense_vecs"][doc_id] - ) + embeddings_vectors_list.append(embeddings_vectors[0]["dense_vecs"][doc_id]) metadata["embedding_endpoint_name"] = self.embedding_model_endpoint metadata_list.append(metadata) embeddings_vectors = embeddings_vectors_list metadatas = metadata_list - self.docsearch._OpenSearchVectorSearch__add( - texts, embeddings_vectors, metadatas=metadatas - ) + self.docsearch._OpenSearchVectorSearch__add(texts, embeddings_vectors, metadatas=metadatas) def lambda_handler(event, context): logger.info(event) - authorizer_type = ( - event["requestContext"].get("authorizer", {}).get("authorizerType") - ) + authorizer_type = event["requestContext"].get("authorizer", {}).get("authorizerType") if authorizer_type == "lambda_authorizer": claims = json.loads(event["requestContext"]["authorizer"]["claims"]) if "use_api_key" in claims: @@ -144,10 +133,9 @@ def lambda_handler(event, context): if resource == PRESIGNED_URL_RESOURCE: input_body = json.loads(event["body"]) file_name = f"intentions/{group_name}/[{input_body['timestamp']}]{input_body['file_name']}" - presigned_url = __gen_presigned_url(file_name, - input_body.get( - "content_type", DEFAULT_CONTENT_TYPE), - input_body.get("expiration", 60*60)) + presigned_url = __gen_presigned_url( + file_name, input_body.get("content_type", DEFAULT_CONTENT_TYPE), input_body.get("expiration", 60 * 60) + ) output = { "message": "The S3 presigned url is generated", "data": { @@ -155,7 +143,6 @@ def lambda_handler(event, context): "s3Bucket": s3_bucket_name, "s3Prefix": file_name, }, - } elif resource.startswith(EXECUTION_RESOURCE): if http_method == "POST": @@ -197,7 +184,7 @@ def __delete_execution(event, group_name): "intentionId": execution_id, }, ) - item = index_response.get('Item') + item = index_response.get("Item") if item: indexes = item.get("index").split(",") 
details = json.loads(item.get("details")) @@ -214,6 +201,7 @@ def __delete_execution(event, group_name): ) return res + # def __can_be_deleted(execution_id): # return False, "" @@ -221,25 +209,17 @@ def __delete_execution(event, group_name): def __delete_documents_by_text_set(index_name, text_values): # Search for the documents based on the "text" field matching any value in text_values set - search_body = { - "size": 10000, - "query": { - "terms": { - "text.keyword": list(text_values) # Convert set to list - } - } - } + search_body = {"size": 10000, "query": {"terms": {"text.keyword": list(text_values)}}} # Convert set to list # Perform the search try: - search_result = aos_client.search( - index=index_name, body=search_body) # Adjust size if needed - hits = search_result['hits']['hits'] + search_result = aos_client.search(index=index_name, body=search_body) # Adjust size if needed + hits = search_result["hits"]["hits"] # If documents exist, delete them if hits: for hit in hits: - doc_id = hit['_id'] + doc_id = hit["_id"] aos_client.delete(index=index_name, id=doc_id) logger.info("Deleted document with id %s", doc_id) except NotFoundError: @@ -247,10 +227,7 @@ def __delete_documents_by_text_set(index_name, text_values): def __get_query_parameter(event, parameter_name, default_value=None): - if ( - event.get("queryStringParameters") - and parameter_name in event["queryStringParameters"] - ): + if event.get("queryStringParameters") and parameter_name in event["queryStringParameters"]: return event["queryStringParameters"][parameter_name] return default_value @@ -258,8 +235,7 @@ def __get_query_parameter(event, parameter_name, default_value=None): def __gen_presigned_url(object_name: str, content_type: str, expiration: int): return s3_client.generate_presigned_url( ClientMethod="put_object", - Params={"Bucket": s3_bucket_name, - "Key": object_name, "ContentType": content_type}, + Params={"Bucket": s3_bucket_name, "Key": object_name, "ContentType": content_type}, 
ExpiresIn=expiration, HttpMethod="PUT", ) @@ -276,24 +252,20 @@ def __list_execution(event, group_name): } response = dynamodb_client.query( TableName=intention_table_name, - KeyConditionExpression='groupName = :groupName', - ExpressionAttributeValues={ - ':groupName': {'S': group_name} - } + KeyConditionExpression="groupName = :groupName", + ExpressionAttributeValues={":groupName": {"S": group_name}}, ) output = {} page_json = [] - items = response['Items'] - while 'LastEvaluatedKey' in response: + items = response["Items"] + while "LastEvaluatedKey" in response: response = dynamodb_client.query( TableName=intention_table_name, - KeyConditionExpression='groupName = :pk_val', - ExpressionAttributeValues={ - ':pk_val': {'S': group_name} - }, - ExclusiveStartKey=response['LastEvaluatedKey'] + KeyConditionExpression="groupName = :pk_val", + ExpressionAttributeValues={":pk_val": {"S": group_name}}, + ExclusiveStartKey=response["LastEvaluatedKey"], ) - items.extend(response['Items']) + items.extend(response["Items"]) for item in items: item_json = {} @@ -331,15 +303,14 @@ def __create_execution(event, context, email, group_name): bucket = input_body.get("s3Bucket") prefix = input_body.get("s3Prefix") s3_response = __get_s3_object_with_retry(bucket, prefix) - file_content = s3_response['Body'].read() + file_content = s3_response["Body"].read() excel_file = BytesIO(file_content) workbook = load_workbook(excel_file) sheet = workbook.active qaList = [] for row in sheet.iter_rows(min_row=2, values_only=True): - question, intention, kwargs = row[0], row[1], row[2] if len( - row) > 2 else None + question, intention, kwargs = row[0], row[1], row[2] if len(row) > 2 else None if not question: continue # for i, element in enumerate(qaList): @@ -350,11 +321,7 @@ def __create_execution(event, context, email, group_name): # "kwargs": kwargs # } # return qaList - qaList.append({ - "question": question, - "intention": intention, - "kwargs": kwargs - }) + qaList.append({"question": 
question, "intention": intention, "kwargs": kwargs}) # write to ddb(meta data) intention_table.put_item( Item={ @@ -366,19 +333,14 @@ def __create_execution(event, context, email, group_name): "tag": execution_detail["index"], "File": f'{bucket}{input_body.get("s3Prefix")}', "LastModifiedBy": email, - "LastModifiedTime": re.findall(r'\[(.*?)\]', input_body.get("s3Prefix"))[0], - "details": json.dumps(qaList) + "LastModifiedTime": re.findall(r"\[(.*?)\]", input_body.get("s3Prefix"))[0], + "details": json.dumps(qaList), } ) # write to aos(vectorData) - __save_2_aos(input_body.get("model"), - execution_detail["index"], qaList, bucket, prefix) + __save_2_aos(input_body.get("model"), execution_detail["index"], qaList, bucket, prefix) - return { - "execution_id": execution_detail["tableItemId"], - "input_payload": execution_detail, - "result": "success" - } + return {"execution_id": execution_detail["tableItemId"], "input_payload": execution_detail, "result": "success"} def convert_qa_list(qa_list: list, bucket: str, prefix: str) -> List[Document]: @@ -392,7 +354,7 @@ def convert_qa_list(qa_list: list, bucket: str, prefix: str) -> List[Document]: "file_path": "", "keywords": [], "summary": "", - "type": "Intent" + "type": "Intent", } page_content = qa["question"] metadata = metadata_template @@ -412,9 +374,7 @@ def convert_qa_list(qa_list: list, bucket: str, prefix: str) -> List[Document]: def __save_2_aos(modelId: str, index: str, qaListParam: list, bucket: str, prefix: str): qaList = __deduplicate_by_key(qaListParam, "question") if kb_enabled: - embedding_function = sm_utils.getCustomEmbeddings( - embedding_model_endpoint, region, "bce" - ) + embedding_function = sm_utils.getCustomEmbeddings(embedding_model_endpoint, region, "bce") docsearch = OpenSearchVectorSearch( index_name=index, embedding_function=embedding_function, @@ -438,23 +398,11 @@ def __save_2_aos(modelId: str, index: str, qaListParam: list, bucket: str, prefi def __create_index(index: str, modelId: str): 
body = { "settings": { - "index": { - "number_of_shards": 1, - "number_of_replicas": 0, - "knn": True, - "knn.algo_param.ef_search": 32 - } + "index": {"number_of_shards": 1, "number_of_replicas": 0, "knn": True, "knn.algo_param.ef_search": 32} }, "mappings": { "properties": { - "text": { - "type": "text", - "fields": { - "keyword": { - "type": "keyword" - } - } - }, + "text": {"type": "text", "fields": {"keyword": {"type": "keyword"}}}, "sentence_vector": { "type": "knn_vector", "dimension": ModelDimensionMap[modelId], @@ -462,14 +410,11 @@ def __create_index(index: str, modelId: str): "engine": "nmslib", "space_type": "l2", "name": "hnsw", - "parameters": { - "ef_construction": 512, - "m": 16 - } - } - } + "parameters": {"ef_construction": 512, "m": 16}, + }, + }, } - } + }, } try: aos_client.indices.create(index=index, body=body) @@ -479,8 +424,7 @@ def __create_index(index: str, modelId: str): def __refresh_index(index: str, modelId: str, qaList): - success, failed = helpers.bulk(aos_client, __append_embeddings( - index, modelId, qaList), chunk_size=BULK_SIZE) + success, failed = helpers.bulk(aos_client, __append_embeddings(index, modelId, qaList), chunk_size=BULK_SIZE) aos_client.indices.refresh(index=index) logger.info("Successfully added: %d ", success) logger.info("Failed: %d ", len(failed)) @@ -491,14 +435,9 @@ def __append_embeddings(index, modelId, qaList: list): documents = [] for item in qaList: question = item["question"] - embedding_func = BedrockEmbeddings( - client=bedrock_client, - model_id=modelId - ) + embedding_func = BedrockEmbeddings(client=bedrock_client, model_id=modelId) - embeddings_vectors = embedding_func.embed_documents( - [question] - ) + embeddings_vectors = embedding_func.embed_documents([question]) documents.append( { "text": question, @@ -506,25 +445,20 @@ def __append_embeddings(index, modelId, qaList: list): "answer": item["intention"], "source": "portal", **({"kwargs": item["kwargs"]} if item.get("kwargs") else {}), - "type": 
"Intent" + "type": "Intent", }, - "sentence_vector": embeddings_vectors[0] + "sentence_vector": embeddings_vectors[0], } ) for document in documents: index_list = index.split(",") for index_item in index_list: - doc_id = hashlib.md5(str(document["text"]).encode('utf-8')).hexdigest() - action = { - "_op_type": "index", - "_index": index_item, - "_id": doc_id, - "_source": document - } + doc_id = hashlib.md5(str(document["text"]).encode("utf-8")).hexdigest() + action = {"_op_type": "index", "_index": index_item, "_id": doc_id, "_source": document} actions.append(action) return actions - # yield {"_op_type": "index", "_index": index_item, "_source": document, "_id": hashlib.md5(str(document).encode('utf-8')).hexdigest()} + # yield {"_op_type": "index", "_index": index_item, "_source": document, "_id": hashlib.md5(str(document).encode('utf-8')).hexdigest()} def __get_execution(event, group_name): @@ -535,7 +469,7 @@ def __get_execution(event, group_name): "intentionId": executionId, }, ) - item = index_response['Item'] + item = index_response["Item"] res = {} Items = [] # for item in items: @@ -543,10 +477,10 @@ def __get_execution(event, group_name): for key in list(item.keys()): value = item.get(key) if key == "File": - split_index = value.rfind('/') + split_index = value.rfind("/") if split_index != -1: item_json["s3Path"] = value[:split_index] - item_json["s3Prefix"] = value[split_index + 1:] + item_json["s3Prefix"] = value[split_index + 1 :] else: item_json["s3Path"] = value item_json["s3Prefix"] = "-" @@ -576,12 +510,12 @@ def __get_s3_object_with_retry(bucket: str, key: str, max_retries: int = 5, dela raise time.sleep(delay) + def __download_template(): url = s3_client.generate_presigned_url( ClientMethod="get_object", - Params={'Bucket': s3_bucket_name, - 'Key': "templates/intention_corpus.xlsx"}, - ExpiresIn=60 + Params={"Bucket": s3_bucket_name, "Key": "templates/intention_corpus.xlsx"}, + ExpiresIn=60, ) return url @@ -595,7 +529,7 @@ def 
__index_used_scan(event, group_name): }, ) pre_model = index_response.get("Item") - model_name = '' + model_name = "" if pre_model: model_response = model_table.get_item( Key={ @@ -603,25 +537,12 @@ def __index_used_scan(event, group_name): "modelId": pre_model.get("modelIds", {}).get("embedding"), } ) - model_name = model_response.get("Item", {}).get( - "parameter", {}).get("ModelName", "") + model_name = model_response.get("Item", {}).get("parameter", {}).get("ModelName", "") # model_name = model_response.get("ModelName", {}).get("S","-") if not pre_model or model_name == input_body.get("model"): - return { - "statusCode": 200, - "headers": resp_header, - "body": json.dumps({ - "result": "valid" - }) - } + return {"statusCode": 200, "headers": resp_header, "body": json.dumps({"result": "valid"})} else: - return { - "statusCode": 200, - "headers": resp_header, - "body": json.dumps({ - "result": "invalid" - } - )} + return {"statusCode": 200, "headers": resp_header, "body": json.dumps({"result": "invalid"})} def __deduplicate_by_key(lst, key): @@ -630,10 +551,8 @@ def __deduplicate_by_key(lst, key): seen[element[key]] = element return list(seen.values()) + def __get_query_parameter(event, parameter_name, default_value=None): - if ( - event.get("queryStringParameters") - and parameter_name in event["queryStringParameters"] - ): + if event.get("queryStringParameters") and parameter_name in event["queryStringParameters"]: return event["queryStringParameters"][parameter_name] return default_value From 4fa832055df6d12ea4af84fd631c796a2492c9ff Mon Sep 17 00:00:00 2001 From: Xu Han Date: Thu, 17 Oct 2024 05:48:06 +0000 Subject: [PATCH 008/110] feat: support bedrock embedding --- source/lambda/intention/aos/sm_utils.py | 2 +- source/lambda/intention/intention.py | 46 +++++++++++++------------ 2 files changed, 25 insertions(+), 23 deletions(-) diff --git a/source/lambda/intention/aos/sm_utils.py b/source/lambda/intention/aos/sm_utils.py index 794f61616..c469d2d17 100644 --- 
a/source/lambda/intention/aos/sm_utils.py +++ b/source/lambda/intention/aos/sm_utils.py @@ -5,7 +5,7 @@ import boto3 from langchain.callbacks.manager import CallbackManagerForLLMRun -from langchain.embeddings import BedrockEmbeddings, SagemakerEndpointEmbeddings +from langchain_community.embeddings import BedrockEmbeddings, SagemakerEndpointEmbeddings from langchain.embeddings.sagemaker_endpoint import EmbeddingsContentHandler from langchain.llms.sagemaker_endpoint import LLMContentHandler, SagemakerEndpoint from langchain.llms.utils import enforce_stop_tokens diff --git a/source/lambda/intention/intention.py b/source/lambda/intention/intention.py index e3871b5a1..8176f4db2 100644 --- a/source/lambda/intention/intention.py +++ b/source/lambda/intention/intention.py @@ -9,7 +9,7 @@ import boto3 from aos import sm_utils -from aos.aos_utils import LLMBotOpenSearchClient +from embeddings import get_embedding_info from botocore.paginate import TokenEncoder from constant import ( BULK_SIZE, @@ -24,7 +24,7 @@ ModelDimensionMap, ) from langchain.docstore.document import Document -from langchain.embeddings.bedrock import BedrockEmbeddings +from langchain_community.embeddings import BedrockEmbeddings from langchain_community.vectorstores import OpenSearchVectorSearch from langchain_community.vectorstores.opensearch_vector_search import ( OpenSearchVectorSearch, @@ -56,30 +56,31 @@ model_table = dynamodb_client.Table(model_table_name) sm_client = boto3.client("secretsmanager") -try: - master_user = sm_client.get_secret_value(SecretId=aos_secret)["SecretString"] - secret_body = sm_client.get_secret_value(SecretId=SECRET_NAME)["SecretString"] - secret = json.loads(secret_body) - username = secret.get("username") - password = secret.get("password") - - if not aos_endpoint: - opensearch_client = boto3.client("opensearch") - response = opensearch_client.describe_domain(DomainName=aos_domain_name) - aos_endpoint = response["DomainStatus"]["Endpoint"] - aos_client = 
LLMBotOpenSearchClient(aos_endpoint, (username, password)).client -except sm_client.exceptions.ResourceNotFoundException: - logger.info("Secret '%s' not found in Secrets Manager", aos_secret) -except Exception as err: - logger.error("Error retrieving secret '%s': %s", aos_secret, str(err)) - raise dynamodb_client = boto3.client("dynamodb") s3_client = boto3.client("s3") bedrock_client = boto3.client("bedrock-runtime", region_name=bedrock_region) -credentials = boto3.Session().get_credentials() -awsauth = AWS4Auth(refreshable_credentials=credentials, region=region, service="es") +if not aos_endpoint: + opensearch_client = boto3.client("opensearch") + response = opensearch_client.describe_domain(DomainName=aos_domain_name) + aos_endpoint = response["DomainStatus"]["Endpoint"] + +try: + master_user = sm_client.get_secret_value(SecretId=SECRET_NAME)[ + "SecretString" + ] + cred = json.loads(master_user) + username = cred.get("username") + password = cred.get("password") + awsauth = (username, password) + +except sm_client.exceptions.ResourceNotFoundException: + credentials = boto3.Session().get_credentials() + awsauth = AWS4Auth(refreshable_credentials=credentials, region=region, service="es") +except Exception as e: + logger.error(f"Error retrieving secret '{aos_secret}': {str(e)}") + raise resp_header = { "Content-Type": "application/json", @@ -374,7 +375,8 @@ def convert_qa_list(qa_list: list, bucket: str, prefix: str) -> List[Document]: def __save_2_aos(modelId: str, index: str, qaListParam: list, bucket: str, prefix: str): qaList = __deduplicate_by_key(qaListParam, "question") if kb_enabled: - embedding_function = sm_utils.getCustomEmbeddings(embedding_model_endpoint, region, "bce") + embedding_info = get_embedding_info(embedding_model_endpoint) + embedding_function = sm_utils.getCustomEmbeddings(embedding_model_endpoint, region, embedding_info.get("ModelType")) docsearch = OpenSearchVectorSearch( index_name=index, embedding_function=embedding_function, From 
316f1806cf52416e6927fccc216df1e05183ea8b Mon Sep 17 00:00:00 2001 From: Xu Han Date: Thu, 17 Oct 2024 06:17:12 +0000 Subject: [PATCH 009/110] fix: add aos_client --- source/lambda/intention/intention.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/source/lambda/intention/intention.py b/source/lambda/intention/intention.py index 8176f4db2..0b4f6a7c4 100644 --- a/source/lambda/intention/intention.py +++ b/source/lambda/intention/intention.py @@ -30,7 +30,7 @@ OpenSearchVectorSearch, ) from openpyxl import load_workbook -from opensearchpy import NotFoundError, RequestError, RequestsHttpConnection, helpers +from opensearchpy import NotFoundError, RequestError, RequestsHttpConnection, helpers, OpenSearch from requests_aws4auth import AWS4Auth logger = logging.getLogger(__name__) @@ -82,6 +82,19 @@ logger.error(f"Error retrieving secret '{aos_secret}': {str(e)}") raise +aos_client = OpenSearch( + hosts=[ + { + "host": aos_endpoint.replace("https://", ""), + "port": int(os.environ.get("AOS_PORT", 443)), + } + ], + http_auth=awsauth, + use_ssl=True, + verify_certs=True, + connection_class=RequestsHttpConnection, +) + resp_header = { "Content-Type": "application/json", "Access-Control-Allow-Headers": "Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token", From f786e764caa96d3a82788ad484c06e49cf7df16f Mon Sep 17 00:00:00 2001 From: zhouxss Date: Thu, 17 Oct 2024 12:35:52 +0000 Subject: [PATCH 010/110] add langchain_integration folder;support lazy load model and chains --- .../common_logic/common_utils/constant.py | 12 +- .../common_logic/common_utils/prompt_utils.py | 4 +- source/lambda/online/lambda_agent/agent.py | 2 +- .../tool_calling_chain_claude_xml.py | 2 +- .../llm_generate_utils/llm_models.py | 1 + .../langchain_integration/chains/__init__.py | 194 ++++++++ .../chains/chat_chain.py | 338 +++++++++++++ .../chains/conversation_summary_chain.py | 215 +++++++++ .../chains/hyde_chain.py | 103 ++++ 
.../chains/intention_chain.py | 224 +++++++++ .../chains/llm_chain_base.py | 26 + .../chains/marketing_chains/__init__.py | 15 + .../mkt_conversation_summary.py | 120 +++++ .../chains/marketing_chains/mkt_rag_chain.py | 55 +++ .../chains/query_rewrite_chain.py | 143 ++++++ .../langchain_integration/chains/rag_chain.py | 161 +++++++ .../chains/retail_chains/__init__.py | 26 + .../retail_chains/auto_evaluation_chain.py | 99 ++++ .../retail_conversation_summary_chain.py | 208 ++++++++ .../retail_tool_calling_chain_claude_xml.py | 354 ++++++++++++++ .../retail_tool_calling_chain_json.py | 455 ++++++++++++++++++ .../chains/stepback_chain.py | 138 ++++++ .../chains/tool_calling_chain_claude_xml.py | 320 ++++++++++++ .../chains/translate_chain.py | 40 ++ .../chat_models/__init__.py | 97 ++++ .../chat_models/bedrock_models.py | 77 +++ .../chat_models/openai_models.py | 28 ++ 27 files changed, 3450 insertions(+), 7 deletions(-) create mode 100644 source/lambda/online/langchain_integration/chains/__init__.py create mode 100644 source/lambda/online/langchain_integration/chains/chat_chain.py create mode 100644 source/lambda/online/langchain_integration/chains/conversation_summary_chain.py create mode 100644 source/lambda/online/langchain_integration/chains/hyde_chain.py create mode 100644 source/lambda/online/langchain_integration/chains/intention_chain.py create mode 100644 source/lambda/online/langchain_integration/chains/llm_chain_base.py create mode 100644 source/lambda/online/langchain_integration/chains/marketing_chains/__init__.py create mode 100644 source/lambda/online/langchain_integration/chains/marketing_chains/mkt_conversation_summary.py create mode 100644 source/lambda/online/langchain_integration/chains/marketing_chains/mkt_rag_chain.py create mode 100644 source/lambda/online/langchain_integration/chains/query_rewrite_chain.py create mode 100644 source/lambda/online/langchain_integration/chains/rag_chain.py create mode 100644 
source/lambda/online/langchain_integration/chains/retail_chains/__init__.py create mode 100644 source/lambda/online/langchain_integration/chains/retail_chains/auto_evaluation_chain.py create mode 100644 source/lambda/online/langchain_integration/chains/retail_chains/retail_conversation_summary_chain.py create mode 100644 source/lambda/online/langchain_integration/chains/retail_chains/retail_tool_calling_chain_claude_xml.py create mode 100644 source/lambda/online/langchain_integration/chains/retail_chains/retail_tool_calling_chain_json.py create mode 100644 source/lambda/online/langchain_integration/chains/stepback_chain.py create mode 100644 source/lambda/online/langchain_integration/chains/tool_calling_chain_claude_xml.py create mode 100644 source/lambda/online/langchain_integration/chains/translate_chain.py create mode 100644 source/lambda/online/langchain_integration/chat_models/__init__.py create mode 100644 source/lambda/online/langchain_integration/chat_models/bedrock_models.py create mode 100644 source/lambda/online/langchain_integration/chat_models/openai_models.py diff --git a/source/lambda/online/common_logic/common_utils/constant.py b/source/lambda/online/common_logic/common_utils/constant.py index 518d35daf..c14ee8544 100644 --- a/source/lambda/online/common_logic/common_utils/constant.py +++ b/source/lambda/online/common_logic/common_utils/constant.py @@ -82,17 +82,19 @@ class LLMTaskType(ConstantBase): HYDE_TYPE = "hyde" CONVERSATION_SUMMARY_TYPE = "conversation_summary" RETAIL_CONVERSATION_SUMMARY_TYPE = "retail_conversation_summary" - MKT_CONVERSATION_SUMMARY_TYPE = "mkt_conversation_summary" MKT_QUERY_REWRITE_TYPE = "mkt_query_rewrite" STEPBACK_PROMPTING_TYPE = "stepback_prompting" - TOOL_CALLING = "tool_calling" + TOOL_CALLING_XML = "tool_calling_xml" + TOOL_CALLING_API = "tool_calling_api" RETAIL_TOOL_CALLING = "retail_tool_calling" RAG = "rag" + MTK_RAG = "mkt_rag" CHAT = 'chat' AUTO_EVALUATION = "auto_evaluation" + class 
MessageType(ConstantBase): HUMAN_MESSAGE_TYPE = 'human' AI_MESSAGE_TYPE = 'ai' @@ -133,12 +135,16 @@ class LLMModelType(ConstantBase): INTERNLM2_CHAT_7B = "internlm2-chat-7b" INTERNLM2_CHAT_20B = "internlm2-chat-20b" GLM_4_9B_CHAT = "glm-4-9b-chat" - CHATGPT_35_TURBO = "gpt-3.5-turbo-0125" + CHATGPT_35_TURBO_0125 = "gpt-3.5-turbo-0125" CHATGPT_4_TURBO = "gpt-4-turbo" CHATGPT_4O = "gpt-4o" QWEN2INSTRUCT7B = "qwen2-7B-instruct" QWEN2INSTRUCT72B = "qwen2-72B-instruct" QWEN15INSTRUCT32B = "qwen1_5-32B-instruct" + LLAMA3_1_70B_INSTRUCT = "meta.llama3-1-70b-instruct-v1:0" + MISTRAL_LARGE_2407 = "mistral.mistral-large-2407-v1:0" + COHERE_COMMAND_R_PLUS = "cohere.command-r-plus-v1:0" + class EmbeddingModelType(ConstantBase): diff --git a/source/lambda/online/common_logic/common_utils/prompt_utils.py b/source/lambda/online/common_logic/common_utils/prompt_utils.py index a97d3dc32..953f24b84 100644 --- a/source/lambda/online/common_logic/common_utils/prompt_utils.py +++ b/source/lambda/online/common_logic/common_utils/prompt_utils.py @@ -338,7 +338,7 @@ def prompt_template_render(self, prompt_template: dict): LLMModelType.CLAUDE_3_SONNET, LLMModelType.CLAUDE_3_5_SONNET, ], - task_type=LLMTaskType.TOOL_CALLING, + task_type=LLMTaskType.TOOL_CALLING_XML, prompt_template=AGENT_USER_PROMPT, prompt_name="user_prompt" ) @@ -363,7 +363,7 @@ def prompt_template_render(self, prompt_template: dict): LLMModelType.CLAUDE_3_SONNET, LLMModelType.CLAUDE_3_5_SONNET, ], - task_type=LLMTaskType.TOOL_CALLING, + task_type=LLMTaskType.TOOL_CALLING_XML, prompt_template=AGENT_GUIDELINES_PROMPT, prompt_name="guidelines_prompt" ) diff --git a/source/lambda/online/lambda_agent/agent.py b/source/lambda/online/lambda_agent/agent.py index da898a4b6..495e2587c 100644 --- a/source/lambda/online/lambda_agent/agent.py +++ b/source/lambda/online/lambda_agent/agent.py @@ -26,7 +26,7 @@ def tool_calling(state:dict): "fewshot_examples": state['intent_fewshot_examples'], } - agent_llm_type = 
state.get("agent_llm_type",None) or LLMTaskType.TOOL_CALLING + agent_llm_type = state.get("agent_llm_type",None) or LLMTaskType.TOOL_CALLING_XML group_name = state['chatbot_config']['group_name'] chatbot_id = state['chatbot_config']['chatbot_id'] diff --git a/source/lambda/online/lambda_llm_generate/llm_generate_utils/llm_chains/tool_calling_chain_claude_xml.py b/source/lambda/online/lambda_llm_generate/llm_generate_utils/llm_chains/tool_calling_chain_claude_xml.py index b31ab0d69..3fc57da59 100644 --- a/source/lambda/online/lambda_llm_generate/llm_generate_utils/llm_chains/tool_calling_chain_claude_xml.py +++ b/source/lambda/online/lambda_llm_generate/llm_generate_utils/llm_chains/tool_calling_chain_claude_xml.py @@ -168,7 +168,7 @@ def convert_openai_tool_to_anthropic(tools:list[dict])->str: class Claude2ToolCallingChain(LLMChain): model_id = LLMModelType.CLAUDE_2 - intent_type = LLMTaskType.TOOL_CALLING + intent_type = LLMTaskType.TOOL_CALLING_XML default_model_kwargs = { "max_tokens": 2000, "temperature": 0.1, diff --git a/source/lambda/online/lambda_llm_generate/llm_generate_utils/llm_models.py b/source/lambda/online/lambda_llm_generate/llm_generate_utils/llm_models.py index 1146cbbdf..5ca6dc14e 100644 --- a/source/lambda/online/lambda_llm_generate/llm_generate_utils/llm_models.py +++ b/source/lambda/online/lambda_llm_generate/llm_generate_utils/llm_models.py @@ -2,6 +2,7 @@ import logging import os from datetime import datetime +from langchain_aws.chat_models import ChatBedrockConverse import boto3 diff --git a/source/lambda/online/langchain_integration/chains/__init__.py b/source/lambda/online/langchain_integration/chains/__init__.py new file mode 100644 index 000000000..0453a3ef5 --- /dev/null +++ b/source/lambda/online/langchain_integration/chains/__init__.py @@ -0,0 +1,194 @@ +from typing import Any +from common_logic.common_utils.constant import LLMTaskType + + +class LLMChainMeta(type): + def __new__(cls, name, bases, attrs): + new_cls = 
type.__new__(cls, name, bases, attrs) + if name == "LLMChain": + return new_cls + new_cls.model_map[new_cls.get_chain_id()] = new_cls + return new_cls + + +class LLMChain(metaclass=LLMChainMeta): + model_map = {} + + @classmethod + def get_chain_id(cls): + return cls._get_chain_id(cls.model_id, cls.intent_type) + + @staticmethod + def _get_chain_id(model_id, intent_type): + return f"{model_id}__{intent_type}" + + @classmethod + def get_chain(cls, model_id, intent_type, model_kwargs=None, **kwargs): + # dynamic import + _load_module(intent_type) + return cls.model_map[cls._get_chain_id(model_id, intent_type)].create_chain( + model_kwargs=model_kwargs, **kwargs + ) + +def _import_chat_chain(): + from .chat_chain import ( + Claude2ChatChain, + Claude21ChatChain, + ClaudeInstanceChatChain, + Iternlm2Chat7BChatChain, + Iternlm2Chat20BChatChain, + Baichuan2Chat13B4BitsChatChain, + Claude3HaikuChatChain, + Claude3SonnetChatChain, +) + +def _import_conversation_summary_chain(): + from .conversation_summary_chain import ( + Iternlm2Chat7BConversationSummaryChain, + ClaudeInstanceConversationSummaryChain, + Claude21ConversationSummaryChain, + Claude3HaikuConversationSummaryChain, + Claude3SonnetConversationSummaryChain, + Iternlm2Chat20BConversationSummaryChain +) + +def _import_intention_chain(): + from .intention_chain import ( + Claude21IntentRecognitionChain, + Claude2IntentRecognitionChain, + ClaudeInstanceIntentRecognitionChain, + Claude3HaikuIntentRecognitionChain, + Claude3SonnetIntentRecognitionChain, + Iternlm2Chat7BIntentRecognitionChain, + Iternlm2Chat20BIntentRecognitionChain, + +) + + +def _import_rag_chain(): + from .rag_chain import ( + Claude21RagLLMChain, + Claude2RagLLMChain, + ClaudeInstanceRAGLLMChain, + Claude3HaikuRAGLLMChain, + Claude3SonnetRAGLLMChain, + Baichuan2Chat13B4BitsKnowledgeQaChain +) + + +def _import_translate_chain(): + from .translate_chain import ( + Iternlm2Chat7BTranslateChain, + Iternlm2Chat20BTranslateChain + ) + +def 
_import_mkt_conversation_summary_chains(): + from marketing_chains.mkt_conversation_summary import ( + Claude21MKTConversationSummaryChain, + ClaudeInstanceMKTConversationSummaryChain, + Claude2MKTConversationSummaryChain, + Claude3HaikuMKTConversationSummaryChain, + Claude3SonnetMKTConversationSummaryChain, + Iternlm2Chat7BMKTConversationSummaryChain, + Iternlm2Chat20BMKTConversationSummaryChain +) + +def _import_mkt_rag_chain(): + from marketing_chains.mkt_rag_chain import ( + Iternlm2Chat7BKnowledgeQaChain, + Iternlm2Chat20BKnowledgeQaChain +) + +def _import_stepback_chain(): + from .stepback_chain import ( + Claude21StepBackChain, + ClaudeInstanceStepBackChain, + Claude2StepBackChain, + Claude3HaikuStepBackChain, + Claude3SonnetStepBackChain, + Iternlm2Chat7BStepBackChain, + Iternlm2Chat20BStepBackChain +) + +def _import_hyde_chain(): + from .hyde_chain import ( + Claude21HydeChain, + Claude2HydeChain, + Claude3HaikuHydeChain, + Claude3SonnetHydeChain, + ClaudeInstanceHydeChain, + Iternlm2Chat20BHydeChain, + Iternlm2Chat7BHydeChain +) + +def _import_query_rewrite_chain(): + from .query_rewrite_chain import ( + Claude21QueryRewriteChain, + Claude2QueryRewriteChain, + ClaudeInstanceQueryRewriteChain, + Claude3HaikuQueryRewriteChain, + Claude3SonnetQueryRewriteChain, + Iternlm2Chat20BQueryRewriteChain, + Iternlm2Chat7BQueryRewriteChain +) + + +def _import_tool_calling_chain_claude_xml(): + from .tool_calling_chain_claude_xml import ( + Claude21ToolCallingChain, + Claude3HaikuToolCallingChain, + Claude2ToolCallingChain, + Claude3SonnetToolCallingChain, + ClaudeInstanceToolCallingChain +) + +def _import_retail_conversation_summary_chain(): + from .retail_chains.retail_conversation_summary_chain import ( + Claude2RetailConversationSummaryChain, + Claude21RetailConversationSummaryChain, + Claude3HaikuRetailConversationSummaryChain, + Claude3SonnetRetailConversationSummaryChain, + ClaudeInstanceRetailConversationSummaryChain +) + + +def 
_import_retail_tool_calling_chain_claude_xml(): + from .retail_chains.retail_tool_calling_chain_claude_xml import ( + Claude2RetailToolCallingChain, + Claude21RetailToolCallingChain, + ClaudeInstanceRetailToolCallingChain, + Claude3SonnetRetailToolCallingChain, + Claude3HaikuRetailToolCallingChain +) + + +def _import_auto_evaluation_chain(): + from .retail_chains.auto_evaluation_chain import ( + Claude3HaikuAutoEvaluationChain, + Claude21AutoEvaluationChain, + Claude2AutoEvaluationChain + +) + + +def _load_module(intent_type): + assert intent_type in CHAIN_MODULE_LOAD_FN_MAP,(intent_type,CHAIN_MODULE_LOAD_FN_MAP) + CHAIN_MODULE_LOAD_FN_MAP[intent_type]() + + +CHAIN_MODULE_LOAD_FN_MAP = { + LLMTaskType.CHAT:_import_chat_chain, + LLMTaskType.CONVERSATION_SUMMARY_TYPE:_import_conversation_summary_chain, + LLMTaskType.INTENT_RECOGNITION_TYPE: _import_intention_chain, + LLMTaskType.RAG: _import_rag_chain, + LLMTaskType.QUERY_TRANSLATE_TYPE: _import_translate_chain, + LLMTaskType.MKT_CONVERSATION_SUMMARY_TYPE: _import_mkt_conversation_summary_chains, + LLMTaskType.MTK_RAG: _import_mkt_rag_chain, + LLMTaskType.STEPBACK_PROMPTING_TYPE: _import_stepback_chain, + LLMTaskType.HYDE_TYPE: _import_hyde_chain, + LLMTaskType.QUERY_REWRITE_TYPE: _import_query_rewrite_chain, + LLMTaskType.TOOL_CALLING_XML: _import_tool_calling_chain_claude_xml, + LLMTaskType.RETAIL_CONVERSATION_SUMMARY_TYPE: _import_retail_conversation_summary_chain, + LLMTaskType.RETAIL_TOOL_CALLING: _import_retail_tool_calling_chain_claude_xml, + LLMTaskType.AUTO_EVALUATION: _import_auto_evaluation_chain +} diff --git a/source/lambda/online/langchain_integration/chains/chat_chain.py b/source/lambda/online/langchain_integration/chains/chat_chain.py new file mode 100644 index 000000000..730a84904 --- /dev/null +++ b/source/lambda/online/langchain_integration/chains/chat_chain.py @@ -0,0 +1,338 @@ +# chat llm chains + +from langchain.schema.runnable import RunnableLambda, RunnablePassthrough +from 
langchain_core.messages import AIMessage,SystemMessage +from langchain.prompts import ChatPromptTemplate,HumanMessagePromptTemplate +from langchain_core.messages import convert_to_messages + + +from ..llm_models import Model +from .llm_chain_base import LLMChain + +from common_logic.common_utils.constant import ( + MessageType, + LLMTaskType, + LLMModelType, +) +from common_logic.common_utils.time_utils import get_china_now +from common_logic.common_utils.prompt_utils import get_prompt_template + +AI_MESSAGE_TYPE = MessageType.AI_MESSAGE_TYPE +HUMAN_MESSAGE_TYPE = MessageType.HUMAN_MESSAGE_TYPE +QUERY_TRANSLATE_TYPE = LLMTaskType.QUERY_TRANSLATE_TYPE +SYSTEM_MESSAGE_TYPE = MessageType.SYSTEM_MESSAGE_TYPE + + +class Claude2ChatChain(LLMChain): + model_id = LLMModelType.CLAUDE_2 + intent_type = LLMTaskType.CHAT + + + @classmethod + def get_common_system_prompt(cls,system_prompt_template:str): + now = get_china_now() + date_str = now.strftime("%Y年%m月%d日") + weekdays = ['星期一', '星期二', '星期三', '星期四', '星期五', '星期六', '星期日'] + weekday = weekdays[now.weekday()] + system_prompt = system_prompt_template.format(date=date_str,weekday=weekday) + return system_prompt + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + stream = kwargs.get("stream", False) + system_prompt_template = get_prompt_template( + model_id=cls.model_id, + task_type=cls.intent_type, + prompt_name="system_prompt" + ).prompt_template + + system_prompt = kwargs.get('system_prompt',system_prompt_template) or "" + system_prompt = cls.get_common_system_prompt(system_prompt) + prefill = kwargs.get('prefill',None) + messages = [ + ("placeholder", "{chat_history}"), + HumanMessagePromptTemplate.from_template("{query}") + ] + if system_prompt: + messages.insert(0,SystemMessage(content=system_prompt)) + + if prefill is not None: + messages.append(AIMessage(content=prefill)) + + messages_template = ChatPromptTemplate.from_messages(messages) + llm = Model.get_model(cls.model_id, 
model_kwargs=model_kwargs, **kwargs) + chain = messages_template | RunnableLambda(lambda x: x.messages) + if stream: + chain = ( + chain | RunnableLambda(lambda messages: llm.stream(messages)) + | RunnableLambda(lambda x: (i.content for i in x)) + ) + else: + chain = chain | llm | RunnableLambda(lambda x: x.content) + + return chain + + +class Claude21ChatChain(Claude2ChatChain): + model_id = LLMModelType.CLAUDE_21 + + +class ClaudeInstanceChatChain(Claude2ChatChain): + model_id = LLMModelType.CLAUDE_INSTANCE + + +class Claude3SonnetChatChain(Claude2ChatChain): + model_id = LLMModelType.CLAUDE_3_SONNET + + +class Claude3HaikuChatChain(Claude2ChatChain): + model_id = LLMModelType.CLAUDE_3_HAIKU + + +class Claude35SonnetChatChain(Claude2ChatChain): + model_id = "anthropic.claude-3-5-sonnet-20240620-v1:0" + + +class Mixtral8x7bChatChain(Claude2ChatChain): + model_id = LLMModelType.MIXTRAL_8X7B_INSTRUCT + default_model_kwargs = {"max_tokens": 4096, "temperature": 0.01} + + +class Baichuan2Chat13B4BitsChatChain(LLMChain): + model_id = LLMModelType.BAICHUAN2_13B_CHAT + intent_type = LLMTaskType.CHAT + default_model_kwargs = { + "max_new_tokens": 2048, + "temperature": 0.3, + "top_k": 5, + "top_p": 0.85, + "do_sample": True, + } + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + stream = kwargs.get("stream", False) + # chat_history = kwargs.pop('chat_history',[]) + model_kwargs = model_kwargs or {} + model_kwargs.update({"stream": stream}) + model_kwargs = {**cls.default_model_kwargs, **model_kwargs} + llm = Model.get_model(cls.model_id, model_kwargs=model_kwargs, **kwargs) + llm_chain = RunnableLambda(lambda x: llm.invoke(x, stream=stream)) + return llm_chain + + +class Iternlm2Chat7BChatChain(LLMChain): + model_id = LLMModelType.INTERNLM2_CHAT_7B + intent_type = LLMTaskType.CHAT + + default_model_kwargs = {"temperature": 0.5, "max_new_tokens": 1000} + + @staticmethod + def build_prompt( + query: str, + history=[], + meta_instruction="You are an AI 
assistant whose name is InternLM (书生·浦语).\n" + "- InternLM (书生·浦语) is a conversational language model that is developed by Shanghai AI Laboratory (上海人工智能实验室). It is designed to be helpful, honest, and harmless.\n" + "- InternLM (书生·浦语) can understand and communicate fluently in the language chosen by the user such as English and 中文.", + ): + prompt = "" + if meta_instruction: + prompt += f"""<|im_start|>system\n{meta_instruction}<|im_end|>\n""" + for record in history: + prompt += f"""<|im_start|>user\n{record[0]}<|im_end|>\n<|im_start|>assistant\n{record[1]}<|im_end|>\n""" + prompt += f"""<|im_start|>user\n{query}<|im_end|>\n<|im_start|>assistant\n""" + return prompt + + @classmethod + def create_history(cls, x): + chat_history = x.get("chat_history", []) + chat_history = convert_to_messages(chat_history) + + assert len(chat_history) % 2 == 0, chat_history + history = [] + for i in range(0, len(chat_history), 2): + user_message = chat_history[i] + ai_message = chat_history[i + 1] + assert ( + user_message.type == HUMAN_MESSAGE_TYPE + and ai_message.type == AI_MESSAGE_TYPE + ), chat_history + history.append((user_message.content, ai_message.content)) + return history + + @classmethod + def create_prompt(cls, x,system_prompt=None): + history = cls.create_history(x) + if system_prompt is None: + system_prompt = get_prompt_template( + model_id=cls.model_id, + task_type=cls.intent_type, + prompt_name="system_prompt" + ).prompt_template + + prompt = cls.build_prompt( + query=x["query"], + history=history, + meta_instruction=system_prompt, + ) + return prompt + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + model_kwargs = model_kwargs or {} + model_kwargs = {**cls.default_model_kwargs, **model_kwargs} + stream = kwargs.get("stream", False) + system_prompt = kwargs.get("system_prompt",None) + llm = Model.get_model(cls.model_id, model_kwargs=model_kwargs, **kwargs) + + prompt_template = RunnablePassthrough.assign( + prompt=RunnableLambda(lambda x: 
cls.create_prompt(x,system_prompt=system_prompt)) + ) + llm_chain = prompt_template | RunnableLambda( + lambda x: llm.invoke(x, stream=stream) + ) + return llm_chain + + +class Iternlm2Chat20BChatChain(Iternlm2Chat7BChatChain): + model_id = LLMModelType.INTERNLM2_CHAT_20B + + +class GLM4Chat9BChatChain(LLMChain): + model_id = LLMModelType.GLM_4_9B_CHAT + intent_type = LLMTaskType.CHAT + default_model_kwargs = { + "max_new_tokens": 1024, + "timeout": 60, + "temperature": 0.1, + } + @classmethod + def create_chat_history(cls,x, system_prompt=None): + if system_prompt is None: + system_prompt = get_prompt_template( + model_id=cls.model_id, + task_type=cls.intent_type, + prompt_name="system_prompt" + ).prompt_template + + chat_history = x['chat_history'] + + if system_prompt is not None: + chat_history = [{"role":"system","content": system_prompt}] + chat_history + chat_history = chat_history + [{"role":MessageType.HUMAN_MESSAGE_TYPE,"content":x['query']}] + + return chat_history + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + model_kwargs = model_kwargs or {} + model_kwargs = {**cls.default_model_kwargs, **model_kwargs} + system_prompt = kwargs.get("system_prompt",None) + llm = Model.get_model( + model_id=cls.model_id, + model_kwargs=model_kwargs, + **kwargs + ) + + chain = RunnablePassthrough.assign( + chat_history = RunnableLambda(lambda x: cls.create_chat_history(x,system_prompt=system_prompt)) + ) | RunnableLambda(lambda x: llm.invoke(x)) + + return chain + + +class Qwen2Instruct7BChatChain(LLMChain): + model_id = LLMModelType.QWEN2INSTRUCT7B + intent_type = LLMTaskType.CHAT + default_model_kwargs = { + "max_tokens": 1024, + "temperature": 0.1, + } + + @classmethod + def create_chat_history(cls,x, system_prompt=None): + if system_prompt is None: + system_prompt = get_prompt_template( + model_id=cls.model_id, + task_type=cls.intent_type, + prompt_name="system_prompt" + ).prompt_template + + chat_history = x['chat_history'] + + if 
system_prompt is not None: + chat_history = [{"role":"system", "content": system_prompt}] + chat_history + + chat_history = chat_history + [{"role": MessageType.HUMAN_MESSAGE_TYPE, "content":x['query']}] + return chat_history + + + @classmethod + def parse_function_calls_from_ai_message(cls,message:dict): + return message['text'] + + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + stream = kwargs.get("stream", False) + model_kwargs = model_kwargs or {} + model_kwargs = {**cls.default_model_kwargs, **model_kwargs} + system_prompt = kwargs.get("system_prompt",None) + + llm = Model.get_model( + model_id=cls.model_id, + model_kwargs=model_kwargs, + **kwargs + ) + + chain = RunnablePassthrough.assign( + chat_history = RunnableLambda(lambda x: cls.create_chat_history(x,system_prompt=system_prompt)) + ) | RunnableLambda(lambda x: llm.invoke(x)) | RunnableLambda(lambda x: cls.parse_function_calls_from_ai_message(x)) + + return chain + +class Qwen2Instruct72BChatChain(Qwen2Instruct7BChatChain): + model_id = LLMModelType.QWEN2INSTRUCT72B + + +class Qwen2Instruct72BChatChain(Qwen2Instruct7BChatChain): + model_id = LLMModelType.QWEN15INSTRUCT32B + + +class ChatGPT35ChatChain(LLMChain): + model_id = LLMModelType.CHATGPT_35_TURBO + intent_type = LLMTaskType.CHAT + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + stream = kwargs.get("stream", False) + system_prompt = kwargs.get('system_prompt',None) + prefill = kwargs.get('prefill',None) + messages = [ + ("placeholder", "{chat_history}"), + HumanMessagePromptTemplate.from_template("{query}") + ] + if system_prompt is not None: + messages.insert(SystemMessage(content=system_prompt),0) + + if prefill is not None: + messages.append(AIMessage(content=prefill)) + + messages_template = ChatPromptTemplate.from_messages(messages) + llm = Model.get_model(cls.model_id, model_kwargs=model_kwargs, **kwargs) + chain = messages_template | RunnableLambda(lambda x: x.messages) + if stream: + chain = ( 
+ chain | RunnableLambda(lambda messages: llm.stream(messages)) + | RunnableLambda(lambda x: (i.content for i in x)) + ) + else: + chain = chain | llm | RunnableLambda(lambda x: x.content) + + return chain + +class ChatGPT4ChatChain(ChatGPT35ChatChain): + model_id = LLMModelType.CHATGPT_4_TURBO + +class ChatGPT4oChatChain(ChatGPT35ChatChain): + model_id = LLMModelType.CHATGPT_4O diff --git a/source/lambda/online/langchain_integration/chains/conversation_summary_chain.py b/source/lambda/online/langchain_integration/chains/conversation_summary_chain.py new file mode 100644 index 000000000..c3f1aa1db --- /dev/null +++ b/source/lambda/online/langchain_integration/chains/conversation_summary_chain.py @@ -0,0 +1,215 @@ +# conversation summary chain +from typing import List +import json +from langchain.schema.runnable import ( + RunnableLambda, + RunnablePassthrough, +) + + +from ..llm_models import Model +from .chat_chain import Iternlm2Chat7BChatChain +from .llm_chain_base import LLMChain +from common_logic.common_utils.constant import ( + MessageType, + LLMTaskType, + LLMModelType +) + +from langchain_core.messages import( + AIMessage, + BaseMessage, + HumanMessage, + SystemMessage, + convert_to_messages +) +from langchain.prompts import ( + HumanMessagePromptTemplate, + ChatPromptTemplate +) + +from common_logic.common_utils.prompt_utils import get_prompt_template +from common_logic.common_utils.logger_utils import get_logger,print_llm_messages + +logger = get_logger("conversation_summary") + +AI_MESSAGE_TYPE = MessageType.AI_MESSAGE_TYPE +HUMAN_MESSAGE_TYPE = MessageType.HUMAN_MESSAGE_TYPE +QUERY_TRANSLATE_TYPE = LLMTaskType.QUERY_TRANSLATE_TYPE +SYSTEM_MESSAGE_TYPE = MessageType.SYSTEM_MESSAGE_TYPE + + +class Iternlm2Chat20BConversationSummaryChain(Iternlm2Chat7BChatChain): + model_id = LLMModelType.INTERNLM2_CHAT_20B + default_model_kwargs = { + "max_new_tokens": 300, + "temperature": 0.1, + "stop_tokens": ["\n\n"], + } + + @classmethod + def create_prompt(cls, 
x,system_prompt=None): + chat_history = x["chat_history"] + conversational_contexts = [] + for his in chat_history: + role = his['role'] + assert role in [HUMAN_MESSAGE_TYPE, AI_MESSAGE_TYPE] + if role == HUMAN_MESSAGE_TYPE: + conversational_contexts.append(f"USER: {his['content']}") + else: + conversational_contexts.append(f"AI: {his['content']}") + if system_prompt is None: + system_prompt = get_prompt_template( + model_id=cls.model_id, + task_type=cls.intent_type, + prompt_name="system_prompt" + ).prompt_template + + conversational_context = "\n".join(conversational_contexts) + prompt = cls.build_prompt( + system_prompt.format( + history=conversational_context, question=x["query"] + ) + ) + prompt = prompt + "Standalone Question: " + return prompt + +class Iternlm2Chat7BConversationSummaryChain(Iternlm2Chat20BConversationSummaryChain): + model_id = LLMModelType.INTERNLM2_CHAT_7B + + +class Claude2ConversationSummaryChain(LLMChain): + model_id = LLMModelType.CLAUDE_2 + intent_type = LLMTaskType.CONVERSATION_SUMMARY_TYPE + + default_model_kwargs = {"max_tokens": 2000, "temperature": 0.1, "top_p": 0.9} + prefill = "From PersonU's point of view, here is the single standalone sentence:" + + @staticmethod + def create_conversational_context(chat_history:List[BaseMessage]): + conversational_contexts = [] + for his in chat_history: + assert isinstance(his,(AIMessage,HumanMessage)), his + content = his.content + if isinstance(his,HumanMessage): + conversational_contexts.append(f"USER: {content}") + else: + conversational_contexts.append(f"AI: {content}") + conversational_context = "\n".join(conversational_contexts) + return conversational_context + + @classmethod + def format_conversation(cls,conversation:list[BaseMessage]): + conversation_strs = [] + for message in conversation: + assert isinstance(message,(AIMessage,HumanMessage)), message + content = message.content + if isinstance(message, HumanMessage): + conversation_strs.append(f"PersonU: {content}") + elif 
isinstance(message, AIMessage): + conversation_strs.append(f"PersonA: {content}") + return "\n".join(conversation_strs) + + @classmethod + def create_messages_inputs(cls,x:dict,user_prompt,few_shots:list[dict]): + # create few_shots + few_shot_messages = [] + for few_shot in few_shots: + conversation=cls.format_conversation( + convert_to_messages(few_shot['conversation']) + ) + few_shot_messages.append(HumanMessage(content=user_prompt.format( + conversation=conversation, + current_query=few_shot['conversation'][-1]['content'] + ))) + few_shot_messages.append(AIMessage(content=f"{cls.prefill} {few_shot['rewrite_query']}")) + + # create current cocnversation + cur_messages = convert_to_messages( + x['chat_history'] + [{"role":MessageType.HUMAN_MESSAGE_TYPE,"content":x['query']}] + ) + + conversation = cls.format_conversation(cur_messages) + return { + "conversation":conversation, + "few_shots":few_shot_messages, + "current_query": x['query'] + } + + @classmethod + def create_messages_chain(cls,**kwargs): + system_prompt = get_prompt_template( + model_id=cls.model_id, + task_type=cls.intent_type, + prompt_name="system_prompt" + ).prompt_template + + user_prompt = get_prompt_template( + model_id=cls.model_id, + task_type=cls.intent_type, + prompt_name="user_prompt" + ).prompt_template + + few_shots = get_prompt_template( + model_id=cls.model_id, + task_type=cls.intent_type, + prompt_name="few_shots" + ).prompt_template + + system_prompt = kwargs.get("system_prompt", system_prompt) + user_prompt = kwargs.get('user_prompt', user_prompt) + + cqr_template = ChatPromptTemplate.from_messages([ + SystemMessage(content=system_prompt), + ('placeholder','{few_shots}'), + HumanMessagePromptTemplate.from_template(user_prompt), + AIMessage(content=cls.prefill) + ]) + return RunnableLambda(lambda x: cls.create_messages_inputs(x,user_prompt=user_prompt,few_shots=json.loads(few_shots))) | cqr_template + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + 
model_kwargs = model_kwargs or {} + model_kwargs = {**cls.default_model_kwargs, **model_kwargs} + llm = Model.get_model( + model_id=cls.model_id, + model_kwargs=model_kwargs, + ) + messages_chain = cls.create_messages_chain(**kwargs) + chain = messages_chain | RunnableLambda(lambda x: print_llm_messages(f"conversation summary messages: {x.messages}") or x.messages) \ + | llm | RunnableLambda(lambda x: x.content) + return chain + + +class Claude21ConversationSummaryChain(Claude2ConversationSummaryChain): + model_id = LLMModelType.CLAUDE_21 + + +class ClaudeInstanceConversationSummaryChain(Claude2ConversationSummaryChain): + model_id = LLMModelType.CLAUDE_INSTANCE + + +class Claude3SonnetConversationSummaryChain(Claude2ConversationSummaryChain): + model_id = LLMModelType.CLAUDE_3_SONNET + + +class Claude3HaikuConversationSummaryChain(Claude2ConversationSummaryChain): + model_id = LLMModelType.CLAUDE_3_HAIKU + + +class Qwen2Instruct72BConversationSummaryChain(Claude2ConversationSummaryChain): + model_id = LLMModelType.QWEN2INSTRUCT72B + + +class Qwen2Instruct72BConversationSummaryChain(Claude2ConversationSummaryChain): + model_id = LLMModelType.QWEN15INSTRUCT32B + + +class Qwen2Instruct7BConversationSummaryChain(Claude2ConversationSummaryChain): + model_id = LLMModelType.QWEN2INSTRUCT7B + + +class GLM4Chat9BConversationSummaryChain(Claude2ConversationSummaryChain): + model_id = LLMModelType.GLM_4_9B_CHAT + + diff --git a/source/lambda/online/langchain_integration/chains/hyde_chain.py b/source/lambda/online/langchain_integration/chains/hyde_chain.py new file mode 100644 index 000000000..de3b0f0dd --- /dev/null +++ b/source/lambda/online/langchain_integration/chains/hyde_chain.py @@ -0,0 +1,103 @@ +# hyde + +from langchain.prompts import ( + ChatPromptTemplate, + HumanMessagePromptTemplate, +) +from langchain.schema.runnable import ( + RunnableLambda, + RunnablePassthrough, +) + +from common_logic.common_utils.constant import ( + LLMTaskType, + LLMModelType +) + +from 
..chains import LLMChain +from ..chat_models import Model as LLM_Model +from .chat_chain import Iternlm2Chat7BChatChain +from .llm_chain_base import LLMChain + +HYDE_TYPE = LLMTaskType.HYDE_TYPE + +WEB_SEARCH_TEMPLATE = """Please write a passage to answer the question +Question: {query} +Passage:""" +# hyde_web_search_template = PromptTemplate(template=WEB_SEARCH_TEMPLATE, input_variables=["query"]) + + +class Claude2HydeChain(LLMChain): + model_id = LLMModelType.CLAUDE_2 + intent_type = HYDE_TYPE + + default_model_kwargs = { + "temperature": 0.5, + "max_tokens": 1000, + "stop_sequences": ["\n\nHuman:"], + } + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + # query_key = kwargs.pop("query_key", "query") + model_kwargs = model_kwargs or {} + model_kwargs = {**cls.default_model_kwargs, **model_kwargs} + + llm = LLM_Model.get_model( + model_id=cls.model_id, + model_kwargs=model_kwargs, + ) + prompt = ChatPromptTemplate.from_messages( + [HumanMessagePromptTemplate.from_template(WEB_SEARCH_TEMPLATE)] + ) + chain = RunnablePassthrough.assign( + hyde_doc=prompt | llm | RunnableLambda(lambda x: x.content) + ) + return chain + + +class Claude21HydeChain(Claude2HydeChain): + model_id = LLMModelType.CLAUDE_21 + + +class ClaudeInstanceHydeChain(Claude2HydeChain): + model_id = LLMModelType.CLAUDE_INSTANCE + + +class Claude3SonnetHydeChain(Claude2HydeChain): + model_id = LLMModelType.CLAUDE_3_SONNET + + +class Claude3HaikuHydeChain(Claude2HydeChain): + model_id = LLMModelType.CLAUDE_3_HAIKU + + +class Claude35SonnetHydeChain(Claude2HydeChain): + model_id = "anthropic.claude-3-5-sonnet-20240620-v1:0" + + +internlm2_meta_instruction = "You are a helpful AI Assistant." 
+ + +class Iternlm2Chat7BHydeChain(Iternlm2Chat7BChatChain): + model_id = LLMModelType.INTERNLM2_CHAT_7B + intent_type = HYDE_TYPE + + default_model_kwargs = {"temperature": 0.1, "max_new_tokens": 200} + + @classmethod + def create_prompt(cls, x): + query = f"""Please write a brief passage to answer the question. \nQuestion: {prompt}""" + prompt = ( + cls.build_prompt( + query=query, + meta_instruction=internlm2_meta_instruction, + ) + + "Passage: " + ) + return prompt + + +class Iternlm2Chat20BHydeChain(Iternlm2Chat7BHydeChain): + model_id = LLMModelType.INTERNLM2_CHAT_20B + intent_type = HYDE_TYPE diff --git a/source/lambda/online/langchain_integration/chains/intention_chain.py b/source/lambda/online/langchain_integration/chains/intention_chain.py new file mode 100644 index 000000000..292023fda --- /dev/null +++ b/source/lambda/online/langchain_integration/chains/intention_chain.py @@ -0,0 +1,224 @@ +import json +import os +from functools import lru_cache +from random import Random + +from langchain.prompts import ( + ChatPromptTemplate, + HumanMessagePromptTemplate, +) +from langchain.schema.runnable import ( + RunnableLambda, + RunnablePassthrough, +) + +from common_logic.common_utils.constant import LLMTaskType,LLMModelType +from ..llm_models import Model +from .chat_chain import Iternlm2Chat7BChatChain +from .llm_chain_base import LLMChain + +abs_dir = os.path.dirname(__file__) + +intent_save_path = os.path.join( + os.path.dirname(os.path.dirname(abs_dir)), + "intent_utils", + "intent_examples", + "examples.json", +) + + +@lru_cache() +def load_intention_file(intent_save_path=intent_save_path, seed=42): + intent_few_shot_examples = json.load(open(intent_save_path)) + intent_indexs = { + intent_d["intent"]: intent_d["index"] + for intent_d in intent_few_shot_examples["intents"] + } + few_shot_examples = [] + intents = list(intent_few_shot_examples["examples"].keys()) + for intent in intents: + examples = intent_few_shot_examples["examples"][intent] + for query 
in examples: + few_shot_examples.append({"intent": intent, "query": query}) + # shuffle + Random(seed).shuffle(few_shot_examples) + return { + "few_shot_examples": few_shot_examples, + "intent_indexs": intent_indexs, + } + + +class Iternlm2Chat7BIntentRecognitionChain(Iternlm2Chat7BChatChain): + model_id = LLMModelType.INTERNLM2_CHAT_7B + intent_type =LLMTaskType.INTENT_RECOGNITION_TYPE + + default_model_kwargs = { + "temperature": 0.1, + "max_new_tokens": 100, + "stop_tokens": ["\n", "。", "."], + } + + @classmethod + def create_prompt(cls, x): + r = load_intention_file(intent_save_path) + few_shot_examples = r["few_shot_examples"] + # intent_indexs = r['intent_indexs'] + exmaple_template = "问题: {query}\n类别: {label}" + example_strs = [] + for example in few_shot_examples: + example_strs.append( + exmaple_template.format(query=example["query"], label=example["intent"]) + ) + + example_str = "\n\n".join(example_strs) + + meta_instruction = f"你是一个问题分类助理,正在对用户的问题进行分类。为了辅助你进行问题分类,下面给出一些示例:\n{example_str}" + query_str = exmaple_template.format(query=x["query"], label="") + prompt_template = """请对下面的问题进行分类: + {query_str} + """ + prompt = cls.build_prompt( + prompt_template.format(query_str=query_str), + meta_instruction=meta_instruction, + ) + prompt = prompt + f"根据前面给到的示例, 问题{x['query']}属于类别:" + + return prompt + + @staticmethod + def postprocess(intent): + intent = intent.replace("。", "").replace(".", "").strip().strip("**") + r = load_intention_file(intent_save_path) + intent_indexs = r["intent_indexs"] + assert intent in intent_indexs, (intent, intent_indexs) + return intent + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + model_kwargs = model_kwargs or {} + model_kwargs = {**cls.default_model_kwargs, **model_kwargs} + chain = super().create_chain(model_kwargs=model_kwargs, **kwargs) + chain = chain | RunnableLambda(lambda x: cls.postprocess(x)) + return chain + + +class 
Iternlm2Chat20BIntentRecognitionChain(Iternlm2Chat7BIntentRecognitionChain): + model_id = LLMModelType.INTERNLM2_CHAT_20B + + +INTENT_RECOGINITION_PROMPT_TEMPLATE_CLUADE = """Please classify this query: {query}. The categories are: + +{categories} + +Some examples of how to classify queries: +{examples} + +Now classify the original query. Respond with just one letter corresponding to the correct category. +""" + + +INTENT_RECOGINITION_EXAMPLE_TEMPLATE = """{query}\n{label}""" + + +class Claude2IntentRecognitionChain(LLMChain): + model_id = LLMModelType.CLAUDE_2 + intent_type = LLMTaskType.INTENT_RECOGNITION_TYPE + + default_model_kwargs = { + "temperature": 0, + "max_tokens": 2000, + "stop_sequences": ["\n\n", "\n\nHuman:"], + } + + @classmethod + def create_few_shot_examples(cls): + ret = [] + for intent in cls.intents: + examples = cls.intent_few_shot_examples["examples"][intent] + for query in examples: + ret.append({"intent": intent, "query": query}) + return ret + + @classmethod + def create_few_shot_example_string( + cls, example_template=INTENT_RECOGINITION_EXAMPLE_TEMPLATE + ): + example_strs = [] + intent_indexs = cls.intent_indexs + for example in cls.few_shot_examples: + example_strs.append( + example_template.format( + label=intent_indexs[example["intent"]], query=example["query"] + ) + ) + return "\n\n".join(example_strs) + + @classmethod + def create_all_labels_string(cls): + intent_few_shot_examples = cls.intent_few_shot_examples + label_strs = [] + labels = intent_few_shot_examples["intents"] + for i, label in enumerate(labels): + label_strs.append(f"({label['index']}) {label['describe']}") + return "\n".join(label_strs) + + def postprocess(self, output: str): + out = output.strip() + assert out, output + return self.index_intents[out[0]] + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + r = load_intention_file(intent_save_path) + cls.few_shot_examples = r["few_shot_examples"] + cls.intent_indexs = r["intent_indexs"] + + 
cls.index_intents = {v: k for k, v in cls.intent_indexs.items()} + cls.intents = list(cls.intent_few_shot_examples["examples"].keys()) + cls.few_shot_examples = cls.create_few_shot_examples() + + cls.examples_str = cls.create_few_shot_example_string( + example_template=INTENT_RECOGINITION_EXAMPLE_TEMPLATE + ) + cls.categories_str = cls.create_all_labels_string() + + intent_recognition_prompt = ChatPromptTemplate.format_messages( + [ + HumanMessagePromptTemplate.from_template( + INTENT_RECOGINITION_PROMPT_TEMPLATE_CLUADE + ) + ] + ) + + model_kwargs = model_kwargs or {} + model_kwargs = {**cls.default_model_kwargs, **model_kwargs} + + llm = Model.get_model(cls.model_id, model_kwargs=model_kwargs) + + chain = ( + RunnablePassthrough.assign( + categories=lambda x: cls.categories_str, + examples=lambda x: cls.examples_str, + ) + | intent_recognition_prompt + | llm + | RunnableLambda(lambda x: cls.postprocess(x.content)) + ) + + return chain + + +class Claude21IntentRecognitionChain(Claude2IntentRecognitionChain): + model_id = LLMModelType.CLAUDE_21 + + +class ClaudeInstanceIntentRecognitionChain(Claude2IntentRecognitionChain): + model_id = LLMModelType.CLAUDE_INSTANCE + + +class Claude3SonnetIntentRecognitionChain(Claude2IntentRecognitionChain): + model_id = LLMModelType.CLAUDE_3_SONNET + + +class Claude3HaikuIntentRecognitionChain(Claude2IntentRecognitionChain): + model_id = LLMModelType.CLAUDE_3_HAIKU diff --git a/source/lambda/online/langchain_integration/chains/llm_chain_base.py b/source/lambda/online/langchain_integration/chains/llm_chain_base.py new file mode 100644 index 000000000..98ae93d34 --- /dev/null +++ b/source/lambda/online/langchain_integration/chains/llm_chain_base.py @@ -0,0 +1,26 @@ +class LLMChainMeta(type): + def __new__(cls, name, bases, attrs): + new_cls = type.__new__(cls, name, bases, attrs) + if name == "LLMChain": + return new_cls + new_cls.model_map[new_cls.get_chain_id()] = new_cls + return new_cls + + +class 
LLMChain(metaclass=LLMChainMeta): + model_map = {} + + @classmethod + def get_chain_id(cls): + return cls._get_chain_id(cls.model_id, cls.intent_type) + + @staticmethod + def _get_chain_id(model_id, intent_type): + return f"{model_id}__{intent_type}" + + @classmethod + def get_chain(cls, model_id, intent_type, model_kwargs=None, **kwargs): + return cls.model_map[cls._get_chain_id(model_id, intent_type)].create_chain( + model_kwargs=model_kwargs, **kwargs + ) + diff --git a/source/lambda/online/langchain_integration/chains/marketing_chains/__init__.py b/source/lambda/online/langchain_integration/chains/marketing_chains/__init__.py new file mode 100644 index 000000000..1307aab1c --- /dev/null +++ b/source/lambda/online/langchain_integration/chains/marketing_chains/__init__.py @@ -0,0 +1,15 @@ +from .mkt_conversation_summary import ( + Claude21MKTConversationSummaryChain, + ClaudeInstanceMKTConversationSummaryChain, + Claude2MKTConversationSummaryChain, + Claude3HaikuMKTConversationSummaryChain, + Claude3SonnetMKTConversationSummaryChain, + Iternlm2Chat7BMKTConversationSummaryChain, + Iternlm2Chat20BMKTConversationSummaryChain +) + +from .mkt_rag_chain import ( + Iternlm2Chat7BKnowledgeQaChain, + Iternlm2Chat20BKnowledgeQaChain +) + diff --git a/source/lambda/online/langchain_integration/chains/marketing_chains/mkt_conversation_summary.py b/source/lambda/online/langchain_integration/chains/marketing_chains/mkt_conversation_summary.py new file mode 100644 index 000000000..4b04e90bf --- /dev/null +++ b/source/lambda/online/langchain_integration/chains/marketing_chains/mkt_conversation_summary.py @@ -0,0 +1,120 @@ + +from langchain.schema.runnable import ( + RunnableLambda, + RunnablePassthrough, +) + +from ..chat_chain import Claude2ChatChain, Iternlm2Chat7BChatChain + +from common_logic.common_utils.constant import ( + MessageType, + LLMTaskType, + LLMModelType +) + +AI_MESSAGE_TYPE = MessageType.AI_MESSAGE_TYPE +HUMAN_MESSAGE_TYPE = MessageType.HUMAN_MESSAGE_TYPE 
+QUERY_TRANSLATE_TYPE = LLMTaskType.QUERY_TRANSLATE_TYPE +SYSTEM_MESSAGE_TYPE = MessageType.SYSTEM_MESSAGE_TYPE +MKT_CONVERSATION_SUMMARY_TYPE = LLMTaskType.MKT_CONVERSATION_SUMMARY_TYPE + +CHIT_CHAT_SYSTEM_TEMPLATE = """You are a helpful AI Assistant""" + +class Iternlm2Chat7BMKTConversationSummaryChain(Iternlm2Chat7BChatChain): + model_id = LLMModelType.INTERNLM2_CHAT_7B + intent_type = MKT_CONVERSATION_SUMMARY_TYPE + + @classmethod + def create_prompt(cls, x): + return x["prompt"] + + @classmethod + def _create_prompt(cls, x): + chat_history = x["chat_history"] + assert len(chat_history) % 2 == 0, chat_history + + history = [] + questions = [] + for i in range(0, len(chat_history), 2): + assert chat_history[i].type == HUMAN_MESSAGE_TYPE, chat_history + assert chat_history[i + 1].type == AI_MESSAGE_TYPE, chat_history + questions.append(chat_history[i].content) + history.append((chat_history[i].content, chat_history[i + 1].content)) + + questions_str = "" + for i, question in enumerate(questions): + questions_str += f"问题{i+1}: {question}\n" + # print(questions_str) + query_input = """请总结上述对话中的内容,为每一轮对话单独做一个不超过50个字的简短总结。\n""" + prompt = cls.build_prompt( + meta_instruction=CHIT_CHAT_SYSTEM_TEMPLATE, + history=history, + query=query_input, + ) + prompt_assist = f"好的,根据提供历史对话信息,共有{len(history)}段对话:\n{questions_str}\n对它们的总结如下(每一个总结要先复述一下问题):\n" + prefix = f"问题1: {questions[0]}\n总结:" + # thread_local.mkt_conversation_prefix = prefix + # print(thread_local,thread_local.mkt_conversation_prefix) + prompt = prompt + prompt_assist + prefix + # prompt = prompt + return {"prompt": prompt, "prefix": prefix} + + @staticmethod + def stream_postprocess_fn(x): + yield x["prefix"] + yield from x["llm_output"] + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + model_kwargs = model_kwargs or {} + stream = kwargs.get("stream", False) + llm_chain = super().create_chain(model_kwargs=model_kwargs, **kwargs) + chain = ( + RunnablePassthrough.assign(prompt_dict=lambda 
x: cls._create_prompt(x)) + | RunnablePassthrough.assign( + prompt=lambda x: x["prompt_dict"]["prompt"], + prefix=lambda x: x["prompt_dict"]["prefix"], + ) + | RunnablePassthrough.assign(llm_output=llm_chain) + ) + if stream: + chain = chain | RunnableLambda(lambda x: cls.stream_postprocess_fn(x)) + else: + chain = chain | RunnableLambda(lambda x: x["prefix"] + x["llm_output"]) + return chain + + +class Iternlm2Chat20BMKTConversationSummaryChain( + Iternlm2Chat7BMKTConversationSummaryChain +): + model_id = LLMModelType.INTERNLM2_CHAT_20B + + +class Claude2MKTConversationSummaryChain(Claude2ChatChain): + model_id = LLMModelType.CLAUDE_2 + intent_type = MKT_CONVERSATION_SUMMARY_TYPE + + default_model_kwargs = {"max_tokens": 2000, "temperature": 0.1, "top_p": 0.9} + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + chain = super().create_chain(model_kwargs=model_kwargs, **kwargs) + query_input = """请简要总结上述对话中的内容,每一个对话单独一个总结,并用 '- '开头。 每一个总结要先说明问题。\n""" + chain = RunnablePassthrough.assign(query=lambda x: query_input) | chain + return chain + + +class Claude21MKTConversationSummaryChain(Claude2MKTConversationSummaryChain): + model_id = LLMModelType.CLAUDE_21 + + +class ClaudeInstanceMKTConversationSummaryChain(Claude2MKTConversationSummaryChain): + model_id = LLMModelType.CLAUDE_INSTANCE + + +class Claude3SonnetMKTConversationSummaryChain(Claude2MKTConversationSummaryChain): + model_id = LLMModelType.CLAUDE_3_SONNET + + +class Claude3HaikuMKTConversationSummaryChain(Claude2MKTConversationSummaryChain): + model_id = LLMModelType.CLAUDE_3_HAIKU diff --git a/source/lambda/online/langchain_integration/chains/marketing_chains/mkt_rag_chain.py b/source/lambda/online/langchain_integration/chains/marketing_chains/mkt_rag_chain.py new file mode 100644 index 000000000..9fc9ca7d9 --- /dev/null +++ b/source/lambda/online/langchain_integration/chains/marketing_chains/mkt_rag_chain.py @@ -0,0 +1,55 @@ +from common_logic.common_utils.constant import ( + 
LLMTaskType, + LLMModelType +) +from ..chat_chain import Iternlm2Chat7BChatChain +from common_logic.common_utils.prompt_utils import register_prompt_templates,get_prompt_template + +INTERLM2_RAG_PROMPT_TEMPLATE = "你是一个Amazon AWS的客服助理小Q,帮助的用户回答使用AWS过程中的各种问题。\n面对用户的问题,你需要给出中文回答,注意不要在回答中重复输出内容。\n下面给出相关问题的背景知识, 需要注意的是如果你认为当前的问题不能在背景知识中找到答案, 你需要拒答。\n背景知识:\n{context}\n\n" + +register_prompt_templates( + model_ids=[LLMModelType.INTERNLM2_CHAT_7B,LLMModelType.INTERNLM2_CHAT_20B], + task_type=LLMTaskType.MTK_RAG, + prompt_template=INTERLM2_RAG_PROMPT_TEMPLATE, + prompt_name="system_prompt" +) + +class Iternlm2Chat7BKnowledgeQaChain(Iternlm2Chat7BChatChain): + model_id = LLMModelType.INTERNLM2_CHAT_7B + intent_type = LLMTaskType.MTK_RAG + default_model_kwargs = {"temperature": 0.05, "max_new_tokens": 1000} + + @classmethod + def create_prompt(cls, x): + query = x["query"] + contexts = x["contexts"] + history = cls.create_history(x) + context = "\n".join(contexts) + prompt_template = get_prompt_template( + model_id = cls.model_id, + task_type = cls.task_type, + prompt_name = "system_prompt" + ).prompt_template + meta_instruction = prompt_template.format(context) + # meta_instruction = f"You are an assistant for question-answering tasks. Use the following pieces of retrieved context to answer the question. If you don't know the answer, just say that you don't know. Use simplified Chinese to response the qustion. I’m going to tip $300K for a better answer! " + # meta_instruction = f'You are an expert AI on a question and answer task. \nUse the "Following Context" when answering the question. If you don't know the answer, reply to the "Following Text" in the header and answer to the best of your knowledge, or if you do know the answer, answer without the "Following Text"' + # meta_instruction = """You are an expert AI on a question and answer task. + # Use the "Following Context" when answering the question. 
If you don't know the answer, reply to the "Following Text" in the header and answer to the best of your knowledge, or if you do know the answer, answer without the "Following Text". If a question is asked in Korean, translate it to English and always answer in Korean. + # Following Text: "I didn't find the answer in the context given, but here's what I know! **I could be wrong, so cross-verification is a must!**""" + # meta_instruction = """You are an expert AI on a question and answer task. + # Use the "Following Context" when answering the question. If you don't know the answer, reply to the "Sorry, I don't know". """ + # query = f"Question: {query}\nContext:\n{context}" + # query = f"""Following Context: {context} + # Question: {query}""" + query = f"问题: {query}" + prompt = cls.build_prompt( + query=query, history=history, meta_instruction=meta_instruction + ) + # prompt = prompt + "回答: 让我先来判断一下问题的答案是否包含在背景知识中。" + prompt = prompt + f"回答: 经过慎重且深入的思考, 根据背景知识, 我的回答如下:\n" + print("internlm2 prompt: \n", prompt) + return prompt + + +class Iternlm2Chat20BKnowledgeQaChain(Iternlm2Chat7BKnowledgeQaChain): + model_id = LLMModelType.INTERNLM2_CHAT_20B \ No newline at end of file diff --git a/source/lambda/online/langchain_integration/chains/query_rewrite_chain.py b/source/lambda/online/langchain_integration/chains/query_rewrite_chain.py new file mode 100644 index 000000000..331552a1a --- /dev/null +++ b/source/lambda/online/langchain_integration/chains/query_rewrite_chain.py @@ -0,0 +1,143 @@ +# query rewrite +import re + +from langchain.prompts import PromptTemplate +from langchain.schema.runnable import ( + RunnableLambda, + RunnablePassthrough, +) + +from common_logic.common_utils.constant import ( + LLMTaskType, + LLMModelType +) +from ..chains import LLMChain +from ..chat_models import Model as LLM_Model +from .chat_chain import Iternlm2Chat7BChatChain +from .llm_chain_base import LLMChain + +QUERY_REWRITE_TYPE = LLMTaskType.QUERY_REWRITE_TYPE 
+query_expansion_template_claude = PromptTemplate.from_template("""You are an AI language model assistant. Your task is to generate 1 - 5 different sub questions OR alternate versions of the given user question to retrieve relevant documents from a vector database. + +By generating multiple versions of the user question, +your goal is to help the user overcome some of the limitations +of distance-based similarity search. + +By generating sub questions, you can break down questions that refer to multiple concepts into distinct questions. This will help you get the relevant documents for constructing a final answer + +If multiple concepts are present in the question, you should break into sub questions, with one question for each concept + +Provide these alternative questions separated by newlines between XML tags. For example: + + +- Question 1 +- Question 2 +- Question 3 + + +Original question: {question}""") + + +class Claude2QueryRewriteChain(LLMChain): + model_id = LLMModelType.CLAUDE_2 + intent_type = QUERY_REWRITE_TYPE + + default_model_kwargs = { + "temperature": 0.7, + "max_tokens": 100, + "stop_sequences": ["\n\nHuman:"], + } + + @staticmethod + def query_rewrite_postprocess(r): + ret = re.findall(".*?", r, re.S)[0] + questions = re.findall("- (.*?)\n", ret, re.S) + return questions + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + query_key = kwargs.pop("query_key", "query") + model_kwargs = model_kwargs or {} + model_kwargs = {**cls.default_model_kwargs, **model_kwargs} + llm = LLM_Model.get_model(cls.model_id, model_kwargs=model_kwargs, **kwargs) + chain = ( + RunnablePassthrough.assign(question=lambda x: x[query_key]) + | query_expansion_template_claude + | llm + | RunnableLambda(cls.query_rewrite_postprocess) + ) + return chain + + +class Claude21QueryRewriteChain(Claude2QueryRewriteChain): + model_id = LLMModelType.CLAUDE_21 + + +class ClaudeInstanceQueryRewriteChain(Claude2QueryRewriteChain): + model_id = 
LLMModelType.CLAUDE_INSTANCE + + +class Claude3HaikuQueryRewriteChain(Claude2QueryRewriteChain): + model_id = LLMModelType.CLAUDE_3_HAIKU + + +class Claude3SonnetQueryRewriteChain(Claude2QueryRewriteChain): + model_id = LLMModelType.CLAUDE_3_SONNET + + +class Claude35SonnetQueryRewriteChain(Claude2QueryRewriteChain): + mdoel_id = "anthropic.claude-3-5-sonnet-20240620-v1:0" + + +internlm2_meta_instruction = """You are an AI language model assistant. Your task is to generate 1 - 5 different sub questions OR alternate versions of the given user question to retrieve relevant documents from a vector database. + +By generating multiple versions of the user question, +your goal is to help the user overcome some of the limitations +of distance-based similarity search. + +By generating sub questions, you can break down questions that refer to multiple concepts into distinct questions. This will help you get the relevant documents for constructing a final answer + +If multiple concepts are present in the question, you should break into sub questions, with one question for each concept + +Provide these alternative questions separated by newlines between XML tags. 
For example: + + +- Question 1 +- Question 2 +- Question 3 +""" + + +class Iternlm2Chat7BQueryRewriteChain(Iternlm2Chat7BChatChain): + model_id = LLMModelType.INTERNLM2_CHAT_7B + intent_type = QUERY_REWRITE_TYPE + + default_model_kwargs = {"temperature": 0.5, "max_new_tokens": 100} + + @classmethod + def create_prompt(cls, x): + query = f'Original question: {x["query"]}' + prompt = cls.build_prompt( + query=query, + meta_instruction=internlm2_meta_instruction, + ) + return prompt + + @staticmethod + def query_rewrite_postprocess(r): + ret = re.findall(".*?", r, re.S)[0] + questions = re.findall("- (.*?)\n", ret, re.S) + return questions + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + model_kwargs = model_kwargs or {} + model_kwargs = {**cls.default_model_kwargs, **model_kwargs} + chain = super().create_chain(model_kwargs=model_kwargs, **kwargs) + chain = chain | RunnableLambda(lambda x: cls.query_rewrite_postprocess(x)) + return chain + + +class Iternlm2Chat20BQueryRewriteChain(Iternlm2Chat7BQueryRewriteChain): + model_id = LLMModelType.INTERNLM2_CHAT_20B + intent_type = QUERY_REWRITE_TYPE diff --git a/source/lambda/online/langchain_integration/chains/rag_chain.py b/source/lambda/online/langchain_integration/chains/rag_chain.py new file mode 100644 index 000000000..f04750f64 --- /dev/null +++ b/source/lambda/online/langchain_integration/chains/rag_chain.py @@ -0,0 +1,161 @@ +# rag llm chains +from langchain.prompts import ( + ChatPromptTemplate, + HumanMessagePromptTemplate, + SystemMessagePromptTemplate +) + +from langchain.schema.runnable import RunnableLambda, RunnablePassthrough +from common_logic.common_utils.constant import ( + LLMTaskType, + LLMModelType +) +from common_logic.common_utils.prompt_utils import get_prompt_template +from common_logic.common_utils.logger_utils import print_llm_messages + +# from ...prompt_template import convert_chat_history_from_fstring_format +from ..llm_models import Model +from .llm_chain_base import 
LLMChain + + +def get_claude_rag_context(contexts: list): + assert isinstance(contexts, list), contexts + context_xmls = [] + context_template = """\n{content}\n""" + for i, context in enumerate(contexts): + context_xml = context_template.format(index=i + 1, content=context) + context_xmls.append(context_xml) + + context = "\n".join(context_xmls) + return context + + +class Claude2RagLLMChain(LLMChain): + model_id = LLMModelType.CLAUDE_2 + intent_type = LLMTaskType.RAG + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + stream = kwargs.get("stream", False) + system_prompt_template = get_prompt_template( + model_id=cls.model_id, + task_type=cls.intent_type, + prompt_name="system_prompt" + ).prompt_template + + system_prompt_template = kwargs.get("system_prompt",system_prompt_template) + + chat_messages = [ + SystemMessagePromptTemplate.from_template(system_prompt_template), + ("placeholder", "{chat_history}"), + HumanMessagePromptTemplate.from_template("{query}") + ] + context_chain = RunnablePassthrough.assign( + context=RunnableLambda(lambda x: get_claude_rag_context(x["contexts"])) + ) + llm = Model.get_model(cls.model_id, model_kwargs=model_kwargs, **kwargs) + chain = context_chain | ChatPromptTemplate.from_messages(chat_messages) | RunnableLambda(lambda x: print_llm_messages(f"rag messages: {x.messages}") or x) + if stream: + chain = ( + chain + | RunnableLambda(lambda x: llm.stream(x.messages)) + | RunnableLambda(lambda x: (i.content for i in x)) + ) + else: + chain = chain | llm | RunnableLambda(lambda x: x.content) + return chain + + +class Claude21RagLLMChain(Claude2RagLLMChain): + model_id = LLMModelType.CLAUDE_21 + + +class ClaudeInstanceRAGLLMChain(Claude2RagLLMChain): + model_id = LLMModelType.CLAUDE_INSTANCE + + +class Claude3SonnetRAGLLMChain(Claude2RagLLMChain): + model_id = LLMModelType.CLAUDE_3_SONNET + + +class Claude3HaikuRAGLLMChain(Claude2RagLLMChain): + model_id = LLMModelType.CLAUDE_3_HAIKU + +class 
Mixtral8x7bChatChain(Claude2RagLLMChain): + model_id = LLMModelType.MIXTRAL_8X7B_INSTRUCT + + +from .chat_chain import GLM4Chat9BChatChain + +class GLM4Chat9BRagChain(GLM4Chat9BChatChain): + model_id = LLMModelType.GLM_4_9B_CHAT + intent_type = LLMTaskType.RAG + + @classmethod + def create_chat_history(cls,x, system_prompt=None): + if system_prompt is None: + system_prompt = get_prompt_template( + model_id=cls.model_id, + task_type=cls.intent_type, + prompt_name="system_prompt" + ).prompt_template + context = ("\n" + "="*50+ "\n").join(x['contexts']) + system_prompt = system_prompt.format(context=context) + + return super().create_chat_history(x,system_prompt=system_prompt) + + +from .chat_chain import Qwen2Instruct7BChatChain + +class Qwen2Instruct7BRagChain(Qwen2Instruct7BChatChain): + model_id = LLMModelType.QWEN2INSTRUCT7B + intent_type = LLMTaskType.RAG + + @classmethod + def create_chat_history(cls,x, system_prompt=None): + if system_prompt is None: + system_prompt = get_prompt_template( + model_id=cls.model_id, + task_type=cls.intent_type, + prompt_name="system_prompt" + ).prompt_template + + context = ("\n\n").join(x['contexts']) + system_prompt = system_prompt.format(context=context) + return super().create_chat_history(x,system_prompt=system_prompt) + + +class Qwen2Instruct72BRagChain(Qwen2Instruct7BRagChain): + model_id = LLMModelType.QWEN2INSTRUCT72B + + +class Qwen2Instruct72BRagChain(Qwen2Instruct7BRagChain): + model_id = LLMModelType.QWEN15INSTRUCT32B + + +from .chat_chain import Baichuan2Chat13B4BitsChatChain + +class Baichuan2Chat13B4BitsKnowledgeQaChain(Baichuan2Chat13B4BitsChatChain): + model_id = LLMModelType.BAICHUAN2_13B_CHAT + intent_type = LLMTaskType.RAG + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + llm_chain = super().create_chain(model_kwargs=model_kwargs, **kwargs) + + def add_system_prompt(x): + context = "\n".join(x["contexts"]) + _chat_history = x["chat_history"] + [ + ("system", 
f"给定下面的背景知识:\n{context}\n回答下面的问题:\n") + ] + return _chat_history + + chat_history_chain = RunnablePassthrough.assign( + chat_history=RunnableLambda(lambda x: add_system_prompt(x)) + ) + llm_chain = chat_history_chain | llm_chain + return llm_chain + + + + diff --git a/source/lambda/online/langchain_integration/chains/retail_chains/__init__.py b/source/lambda/online/langchain_integration/chains/retail_chains/__init__.py new file mode 100644 index 000000000..83c50b0a1 --- /dev/null +++ b/source/lambda/online/langchain_integration/chains/retail_chains/__init__.py @@ -0,0 +1,26 @@ +from .retail_conversation_summary_chain import ( + Claude2RetailConversationSummaryChain, + Claude21RetailConversationSummaryChain, + Claude3HaikuRetailConversationSummaryChain, + Claude3SonnetRetailConversationSummaryChain, + ClaudeInstanceRetailConversationSummaryChain +) + +from .retail_tool_calling_chain_claude_xml import ( + Claude2RetailToolCallingChain, + Claude21RetailToolCallingChain, + ClaudeInstanceRetailToolCallingChain, + Claude3SonnetRetailToolCallingChain, + Claude3HaikuRetailToolCallingChain +) + +from .retail_tool_calling_chain_json import ( + GLM4Chat9BRetailToolCallingChain +) + +from .auto_evaluation_chain import ( + Claude3HaikuAutoEvaluationChain, + Claude21AutoEvaluationChain, + Claude2AutoEvaluationChain + +) \ No newline at end of file diff --git a/source/lambda/online/langchain_integration/chains/retail_chains/auto_evaluation_chain.py b/source/lambda/online/langchain_integration/chains/retail_chains/auto_evaluation_chain.py new file mode 100644 index 000000000..bcdd7011d --- /dev/null +++ b/source/lambda/online/langchain_integration/chains/retail_chains/auto_evaluation_chain.py @@ -0,0 +1,99 @@ +# auto evaluation based on llms +import re + +from langchain.schema.runnable import RunnableLambda, RunnablePassthrough +from langchain_core.messages import AIMessage,SystemMessage,HumanMessage +from common_logic.common_utils.logger_utils import get_logger +from 
langchain.prompts import ChatPromptTemplate,HumanMessagePromptTemplate +from langchain_core.messages import convert_to_messages +from common_logic.common_utils.constant import ( + MessageType, + LLMTaskType, + LLMModelType, +) +from ...llm_models import Model +from ..llm_chain_base import LLMChain + +from ..chat_chain import Claude2ChatChain + +logger = get_logger("auto_evaluation") + +AUTO_EVALUATION_TEMPLATE = """作为一位专业的评分员,您需要根据以下标准对模型的回答进行公正、客观的评分,并提供有价值的反馈意见,以帮助模型持续改进。 + +### 评分标准 + +- 满分为10分,最低分为1分, 分值为一个 float 类型。 +- 模型回答与标准答案的相关性越高,得分越高。 +- 如果模型的回答出现大量重复内容,可以直接给0分。 +- 除了内容相关性,还需考虑回答的完整性、逻辑性和语言表达。 +- 请先在xml 标签 中写下你的评分理由。 +- 最后在 xml 标签 中写下你的最终评分。 + +### 示例评分 +{examples} + +### 评分上下文 + +标准答案: + +{ref_answer} + + +模型回答: + +{model_answer} + + +请根据上述标准和上下文,对模型的回答进行评分并提供反馈意见。让我们一起努力,提高模型的表现! +""" + + +class Claude2AutoEvaluationChain(Claude2ChatChain): + intent_type = LLMTaskType.AUTO_EVALUATION + model_id = LLMModelType.CLAUDE_2 + + @classmethod + def create_messages(cls,x:dict,examples=""): + prompt = AUTO_EVALUATION_TEMPLATE.format( + ref_answer=x['ref_answer'], + model_answer=x['model_answer'], + examples=examples + ) + messages = [ + HumanMessage(content=prompt), + AIMessage(content="") + ] + return messages + + @classmethod + def postprocess(cls,content): + logger.info(f"auto eval content: {content}") + try: + score = float(re.findall("(.*?)",content)[0].strip()) + return score + except Exception as e: + logger.error(f"error: {e}, content: {content}") + raise e + + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + llm = Model.get_model(cls.model_id, model_kwargs=model_kwargs, **kwargs) + chain = RunnableLambda(lambda x: cls.create_messages(x)) | llm | RunnableLambda(lambda x: cls.postprocess(x.content)) + return chain + + +class Claude21AutoEvaluationChain(Claude2AutoEvaluationChain): + model_id = LLMModelType.CLAUDE_21 + + + +class Claude3HaikuAutoEvaluationChain(Claude2AutoEvaluationChain): + model_id = LLMModelType.CLAUDE_3_HAIKU + + 
+class Claude3SonnetAutoEvaluationChain(Claude2AutoEvaluationChain): + model_id = LLMModelType.CLAUDE_3_SONNET + + + diff --git a/source/lambda/online/langchain_integration/chains/retail_chains/retail_conversation_summary_chain.py b/source/lambda/online/langchain_integration/chains/retail_chains/retail_conversation_summary_chain.py new file mode 100644 index 000000000..d5be022ef --- /dev/null +++ b/source/lambda/online/langchain_integration/chains/retail_chains/retail_conversation_summary_chain.py @@ -0,0 +1,208 @@ +# conversation summary chain +from typing import List + +from langchain.schema.runnable import ( + RunnableLambda, + RunnablePassthrough, +) + + +from ...llm_models import Model +from ..llm_chain_base import LLMChain +from common_logic.common_utils.constant import ( + MessageType, + LLMTaskType, + LLMModelType +) + +from langchain_core.messages import( + AIMessage, + HumanMessage, + BaseMessage, + convert_to_messages +) +from langchain.prompts import ( + HumanMessagePromptTemplate, + ChatPromptTemplate +) +from ..chat_chain import GLM4Chat9BChatChain + +AI_MESSAGE_TYPE = MessageType.AI_MESSAGE_TYPE +HUMAN_MESSAGE_TYPE = MessageType.HUMAN_MESSAGE_TYPE +QUERY_TRANSLATE_TYPE = LLMTaskType.QUERY_TRANSLATE_TYPE +SYSTEM_MESSAGE_TYPE = MessageType.SYSTEM_MESSAGE_TYPE + + +CQR_TEMPLATE = """# CONTEXT # +下面有一段客户和客服的对话数据(包含在里面),以及当前客户的一个回复(包含在)。 + +{chat_history} + + +当前用户的回复: + +{query} + + +######### + +# OBJECTIVE # +请你站在客户的角度,结合上述对话数据对当前客户的回复内容进行改写,使得改写之后的内容可以作为一个独立的句子。 + +######### + +# STYLE # +改写后的回复需要和里面的内容意思一致。 + +######### + +# RESPONSE FORMAT # +请直接用中文进行回答 +""" + + +class Claude2RetailConversationSummaryChain(LLMChain): + model_id = LLMModelType.CLAUDE_2 + intent_type = LLMTaskType.RETAIL_CONVERSATION_SUMMARY_TYPE + default_model_kwargs = {"max_tokens": 2000, "temperature": 0.1, "top_p": 0.9} + CQR_TEMPLATE = CQR_TEMPLATE + @staticmethod + def create_conversational_context(chat_history:List[BaseMessage]): + conversational_contexts = [] + for his in 
chat_history: + role = his.type + content = his.content + assert role in [HUMAN_MESSAGE_TYPE, AI_MESSAGE_TYPE],(role,[HUMAN_MESSAGE_TYPE, AI_MESSAGE_TYPE]) + if role == HUMAN_MESSAGE_TYPE: + conversational_contexts.append(f"客户: {content}") + else: + conversational_contexts.append(f"客服: {content}") + conversational_context = "\n".join(conversational_contexts) + return conversational_context + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + model_kwargs = model_kwargs or {} + model_kwargs = {**cls.default_model_kwargs, **model_kwargs} + + cqr_template = ChatPromptTemplate.from_messages([ + HumanMessagePromptTemplate.from_template(cls.CQR_TEMPLATE), + AIMessage(content="好的,站在客户的角度,我将当前用户的回复内容改写为: ") + ]) + + llm = Model.get_model( + model_id=cls.model_id, + model_kwargs=model_kwargs, + ) + cqr_chain = RunnablePassthrough.assign( + conversational_context=RunnableLambda( + lambda x: cls.create_conversational_context( + convert_to_messages(x["chat_history"]) + ) + )) \ + | RunnableLambda(lambda x: cqr_template.format(chat_history=x['conversational_context'],query=x['query'])) \ + | llm | RunnableLambda(lambda x: x.content) + + return cqr_chain + + +class Claude21RetailConversationSummaryChain(Claude2RetailConversationSummaryChain): + model_id = LLMModelType.CLAUDE_21 + + +class ClaudeInstanceRetailConversationSummaryChain(Claude2RetailConversationSummaryChain): + model_id = LLMModelType.CLAUDE_INSTANCE + + +class Claude3SonnetRetailConversationSummaryChain(Claude2RetailConversationSummaryChain): + model_id = LLMModelType.CLAUDE_3_SONNET + + +class Claude3HaikuRetailConversationSummaryChain(Claude2RetailConversationSummaryChain): + model_id = LLMModelType.CLAUDE_3_HAIKU + + +class Claude35SonnetRetailConversationSummaryChain(Claude2RetailConversationSummaryChain): + model_id = "anthropic.claude-3-5-sonnet-20240620-v1:0" + + +MIXTRAL_CQR_TEMPLATE = """下面有一段客户和客服的对话,以及当前客户的一个回复,请你站在客户的角度,结合上述对话数据对当前客户的回复内容进行改写,使得改写之后的内容可以作为一个独立的句子。下面是改写的要求: +- 
改写后的回复需要和当前客户的一个回复的内容意思一致。 +- 请直接用中文进行回答。 + +# 客户和客服的对话: +{chat_history} + +# 当前客户的回复: +{query} +""" + + +class Mixtral8x7bRetailConversationSummaryChain(Claude2RetailConversationSummaryChain): + model_id = LLMModelType.MIXTRAL_8X7B_INSTRUCT + default_model_kwargs = {"max_tokens": 1000, "temperature": 0.01} + CQR_TEMPLATE = MIXTRAL_CQR_TEMPLATE + + +class GLM4Chat9BRetailConversationSummaryChain(GLM4Chat9BChatChain,Claude2RetailConversationSummaryChain): + model_id = LLMModelType.GLM_4_9B_CHAT + intent_type = LLMTaskType.RETAIL_CONVERSATION_SUMMARY_TYPE + CQR_TEMPLATE = MIXTRAL_CQR_TEMPLATE + + @classmethod + def create_chat_history(cls,x): + conversational_context = cls.create_conversational_context( + convert_to_messages(x["chat_history"]) + ) + prompt = cls.CQR_TEMPLATE.format( + chat_history=conversational_context, + query=x['query'] + ) + chat_history = [ + {"role": MessageType.HUMAN_MESSAGE_TYPE, + "content": prompt + }, + { + "role":MessageType.AI_MESSAGE_TYPE, + "content": "好的,站在客户的角度,我将当前用户的回复内容改写为: " + } + ] + + return chat_history + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + model_kwargs = model_kwargs or {} + model_kwargs = {**cls.default_model_kwargs, **model_kwargs} + + llm = Model.get_model( + model_id=cls.model_id, + model_kwargs=model_kwargs, + **kwargs + ) + + cqr_chain = RunnablePassthrough.assign( + chat_history = RunnableLambda(lambda x: cls.create_chat_history(x)) + ) | RunnableLambda(lambda x: llm.invoke(x)) + + return cqr_chain + + +class Qwen2Instruct7BRetailConversationSummaryChain(GLM4Chat9BRetailConversationSummaryChain): + model_id = LLMModelType.QWEN2INSTRUCT7B + default_model_kwargs = { + "max_tokens": 1024, + "temperature": 0.1, + } + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + chain = super().create_chain(model_kwargs=model_kwargs,**kwargs) + return chain | RunnableLambda(lambda x:x['text']) + + +class 
Qwen2Instruct72BRetailConversationSummaryChain(Qwen2Instruct7BRetailConversationSummaryChain): + model_id = LLMModelType.QWEN2INSTRUCT72B + + +class Qwen2Instruct72BRetailConversationSummaryChain(Qwen2Instruct7BRetailConversationSummaryChain): + model_id = LLMModelType.QWEN15INSTRUCT32B \ No newline at end of file diff --git a/source/lambda/online/langchain_integration/chains/retail_chains/retail_tool_calling_chain_claude_xml.py b/source/lambda/online/langchain_integration/chains/retail_chains/retail_tool_calling_chain_claude_xml.py new file mode 100644 index 000000000..803e4ef23 --- /dev/null +++ b/source/lambda/online/langchain_integration/chains/retail_chains/retail_tool_calling_chain_claude_xml.py @@ -0,0 +1,354 @@ +# tool calling chain +import json +from typing import List,Dict,Any +import re +from datetime import datetime + +from langchain.schema.runnable import ( + RunnableLambda, +) + +from langchain_core.messages import( + AIMessage, + SystemMessage +) +from langchain.prompts import ChatPromptTemplate + +from langchain_core.messages import AIMessage,SystemMessage,HumanMessage + +from common_logic.common_utils.constant import ( + LLMTaskType, + LLMModelType, + SceneType +) +from functions import get_tool_by_name +from ..llm_chain_base import LLMChain +from ...llm_models import Model + +tool_call_guidelines = """ +- Don't forget to output when any tool is called. +- 每次回答总是先进行思考,并将思考过程写在标签中。请你按照下面的步骤进行思考: + 1. 判断根据当前的上下文是否足够回答用户的问题。 + 2. 如果当前的上下文足够回答用户的问题,请调用 `give_final_response` 工具。 + 3. 如果当前的上下文不能支持回答用户的问题,你可以考虑调用 标签中列举的工具。 + 4. 如果调用工具对应的参数不够,请调用反问工具 `give_rhetorical_question` 来让用户提供更加充分的信息。 + 5. 最后给出你要调用的工具名称。 +- Always output with "中文". + +""" + + +SYSTEM_MESSAGE_PROMPT=("你是安踏的客服助理小安, 主要职责是处理用户售前和售后的问题。下面是当前用户正在浏览的商品信息:\n\n{goods_info}\n" + "In this environment you have access to a set of tools you can use to answer the customer's question." 
+ "\n" + "You may call them like this:\n" + "\n" + "\n" + "$TOOL_NAME\n" + "\n" + "<$PARAMETER_NAME>$PARAMETER_VALUE\n" + "...\n" + "\n" + "\n" + "\n" + "\n" + "Here are the tools available:\n" + "\n" + "{tools}" + "\n" + "\nAnswer the user's request using relevant tools (if they are available). Before calling a tool, do some analysis within tags. First, think about which of the provided tools is the relevant tool to answer the user's request. Second, go through each of the required parameters of the relevant tool and determine if the user has directly provided or given enough information to infer a value. When deciding if the parameter can be inferred, carefully consider all the context to see if it supports a specific value. If all of the required parameters are present or can be reasonably inferred, close the thinking tag and proceed with the tool call. BUT, if one of the values for a required parameter is missing, DO NOT invoke the function (not even with fillers for the missing params) and instead, ask the user to provide the missing parameters. DO NOT ask for more information on optional parameters if it is not provided." + f"\nHere are some guidelines for you:\n{tool_call_guidelines}" + ) + +SYSTEM_MESSAGE_PROMPT_WITH_FEWSHOT_EXAMPLES = SYSTEM_MESSAGE_PROMPT + ( + "Some examples of tool calls are given below, where the content within represents the most recent reply in the dialog." 
+ "\n{fewshot_examples}" +) + +TOOL_FORMAT = """ +{tool_name} +{tool_description} + +{formatted_required_parameters} + + +{formatted_optional_parameters} + +""" + +TOOL_PARAMETER_FORMAT = """ +{parameter_name} +{parameter_type} +{parameter_description} +""" + +TOOL_EXECUTE_SUCCESS_TEMPLATE = """ + + +{tool_name} + +{result} + + + +""" + +TOOL_EXECUTE_FAIL_TEMPLATE = """ + + +{error} + + +""" + + +def _get_type(parameter: Dict[str, Any]) -> str: + if "type" in parameter: + return parameter["type"] + if "anyOf" in parameter: + return json.dumps({"anyOf": parameter["anyOf"]}) + if "allOf" in parameter: + return json.dumps({"allOf": parameter["allOf"]}) + return json.dumps(parameter) + + +def convert_openai_tool_to_anthropic(tools:list[dict])->str: + formatted_tools = tools + tools_data = [ + { + "tool_name": tool["name"], + "tool_description": tool["description"], + "formatted_required_parameters": "\n".join( + [ + TOOL_PARAMETER_FORMAT.format( + parameter_name=name, + parameter_type=_get_type(parameter), + parameter_description=parameter.get("description"), + ) for name, parameter in tool["parameters"]["properties"].items() + if name in tool["parameters"].get("required", []) + ] + ), + "formatted_optional_parameters": "\n".join( + [ + TOOL_PARAMETER_FORMAT.format( + parameter_name=name, + parameter_type=_get_type(parameter), + parameter_description=parameter.get("description"), + ) for name, parameter in tool["parameters"]["properties"].items() + if name not in tool["parameters"].get("required", []) + ] + ), + } + for tool in formatted_tools + ] + tools_formatted = "\n".join( + [ + TOOL_FORMAT.format( + tool_name=tool["tool_name"], + tool_description=tool["tool_description"], + formatted_required_parameters=tool["formatted_required_parameters"], + formatted_optional_parameters=tool["formatted_optional_parameters"], + ) + for tool in tools_data + ] + ) + return tools_formatted + + +class Claude2RetailToolCallingChain(LLMChain): + model_id = LLMModelType.CLAUDE_2 + 
intent_type = LLMTaskType.RETAIL_TOOL_CALLING + default_model_kwargs = { + "max_tokens": 2000, + "temperature": 0.1, + "top_p": 0.9, + "stop_sequences": ["\n\nHuman:", "\n\nAssistant",""], + } + + @staticmethod + def format_fewshot_examples(fewshot_examples:list[dict]): + fewshot_example_strs = [] + for fewshot_example in fewshot_examples: + param_strs = [] + for p,v in fewshot_example['kwargs'].items(): + param_strs.append(f"<{p}>{v}\n" + f"{fewshot_example['query']}\n" + f"\n" + "\n" + "\n" + f"{fewshot_example['name']}\n" + "\n" + f"{param_str}" + "\n" + "\n" + "\n" + "\n" + "" + ) + fewshot_example_strs.append(fewshot_example_str) + fewshot_example_str = '\n'.join(fewshot_example_strs) + return f"\n{fewshot_example_str}\n" + + @classmethod + def parse_function_calls_from_ai_message(cls,message:AIMessage): + content = "" + message.content + "" + function_calls:List[str] = re.findall("(.*?)", content,re.S) + if not function_calls: + content = "" + message.content + + return { + "function_calls": function_calls, + "content": content + } + + + @staticmethod + def generate_chat_history(state:dict): + chat_history = state['chat_history'] \ + + [{"role": "user","content":state['query']}] \ + + state['agent_tool_history'] + return {"chat_history":chat_history} + + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + model_kwargs = model_kwargs or {} + tools:list[dict] = kwargs['tools'] + + tool_names = [tool['name'] for tool in tools] + + # add two extral tools + if "give_rhetorical_question" not in tool_names: + tools.append(get_tool_by_name("give_rhetorical_question",scene=SceneType.RETAIL).tool_def) + + if "give_final_response" not in tool_names: + tools.append(get_tool_by_name("give_final_response",scene=SceneType.RETAIL).tool_def) + + fewshot_examples = kwargs.get('fewshot_examples',[]) + if fewshot_examples: + fewshot_examples.append({ + "name": "give_rhetorical_question", + "query": "今天天气怎么样?", + "kwargs": {"question": "请问你想了解哪个城市的天气?"} + }) + 
+ model_kwargs = {**cls.default_model_kwargs, **model_kwargs} + + tools_formatted = convert_openai_tool_to_anthropic(tools) + goods_info = kwargs['goods_info'] + + if fewshot_examples: + system_prompt = SYSTEM_MESSAGE_PROMPT_WITH_FEWSHOT_EXAMPLES.format( + tools=tools_formatted, + fewshot_examples=cls.format_fewshot_examples( + fewshot_examples + ), + goods_info = goods_info + ) + else: + system_prompt = SYSTEM_MESSAGE_PROMPT.format( + tools=tools_formatted, + goods_info=goods_info + ) + + tool_calling_template = ChatPromptTemplate.from_messages( + [ + SystemMessage(content=system_prompt), + ("placeholder", "{chat_history}"), + AIMessage(content="") + ]) + + llm = Model.get_model( + model_id=cls.model_id, + model_kwargs=model_kwargs, + ) + chain = RunnableLambda(cls.generate_chat_history) | tool_calling_template \ + | RunnableLambda(lambda x: x.messages) \ + | llm | RunnableLambda(lambda message:cls.parse_function_calls_from_ai_message( + message + )) + + return chain + + +class Claude21RetailToolCallingChain(Claude2RetailToolCallingChain): + model_id = LLMModelType.CLAUDE_21 + + +class ClaudeInstanceRetailToolCallingChain(Claude2RetailToolCallingChain): + model_id = LLMModelType.CLAUDE_INSTANCE + + +class Claude3SonnetRetailToolCallingChain(Claude2RetailToolCallingChain): + model_id = LLMModelType.CLAUDE_3_SONNET + + +class Claude3HaikuRetailToolCallingChain(Claude2RetailToolCallingChain): + model_id = LLMModelType.CLAUDE_3_HAIKU + + +MIXTRAL8X7B_QUERY_TEMPLATE = """下面是客户和客服的历史对话信息: +{chat_history} + +当前客户的问题是: {query} + +请你从安踏客服助理小安的角度回答客户当前的问题。你需要使用上述提供的各种工具进行回答。""" + + +class Mixtral8x7bRetailToolCallingChain(Claude2RetailToolCallingChain): + model_id = LLMModelType.MIXTRAL_8X7B_INSTRUCT + default_model_kwargs = {"max_tokens": 1000, "temperature": 0.01,"stop":[""]} + + @classmethod + def parse_function_calls_from_ai_message(cls,message:AIMessage): + content = message.content.replace("\_","_") + function_calls:List[str] = re.findall("(.*?)", content + "",re.S) + 
if function_calls: + function_calls = [function_calls[0]] + if not function_calls: + content = message.content + return { + "function_calls": function_calls, + "content": content + } + + @staticmethod + def chat_history_to_string(chat_history:list[dict]): + chat_history_lc = ChatPromptTemplate.from_messages([ + ("placeholder", "{chat_history}") + ]).invoke({"chat_history":chat_history}).messages + + chat_history_strs = [] + for message in chat_history_lc: + assert isinstance(message,(HumanMessage,AIMessage)),message + if isinstance(message,HumanMessage): + chat_history_strs.append(f"客户: {message.content}") + else: + chat_history_strs.append(f"客服: {message.content}") + return "\n".join(chat_history_strs) + + + @classmethod + def generate_chat_history(cls,state:dict): + chat_history_str = cls.chat_history_to_string(state['chat_history']) + + chat_history = [{ + "role": "user", + "content": MIXTRAL8X7B_QUERY_TEMPLATE.format( + chat_history=chat_history_str, + query = state['query'] + ) + }] + state['agent_tool_history'] + return {"chat_history": chat_history} + + + + + + diff --git a/source/lambda/online/langchain_integration/chains/retail_chains/retail_tool_calling_chain_json.py b/source/lambda/online/langchain_integration/chains/retail_chains/retail_tool_calling_chain_json.py new file mode 100644 index 000000000..d20bb6c03 --- /dev/null +++ b/source/lambda/online/langchain_integration/chains/retail_chains/retail_tool_calling_chain_json.py @@ -0,0 +1,455 @@ +# tool calling chain +import json +from typing import List,Dict,Any +import re +from datetime import datetime +import copy + +from langchain.schema.runnable import ( + RunnableLambda, +) + +from langchain_core.messages import( + AIMessage, + SystemMessage +) +from langchain.prompts import ChatPromptTemplate + +from langchain_core.messages import AIMessage,SystemMessage,HumanMessage + +from common_logic.common_utils.constant import ( + LLMTaskType, + LLMModelType, + MessageType, + SceneType +) +from functions 
import get_tool_by_name + +from ..llm_chain_base import LLMChain +from ...llm_models import Model +from ..chat_chain import GLM4Chat9BChatChain +from common_logic.common_utils.logger_utils import get_logger + +logger = get_logger("retail_tool_calling_chain_json") + +GLM4_SYSTEM_PROMPT = """你是安踏的客服助理小安, 主要职责是处理用户售前和售后的问题。{date_prompt} +请遵守下面的规范回答用户的问题。 +## 回答规范 +- 如果用户的提供的信息不足以回答问题,尽量反问用户。 +- 回答简洁明了,一句话以内。 + +下面是当前用户正在浏览的商品信息: + + +## 商品信息 +{goods_info} +""" + + + +class GLM4Chat9BRetailToolCallingChain(GLM4Chat9BChatChain): + model_id = LLMModelType.GLM_4_9B_CHAT + intent_type = LLMTaskType.RETAIL_TOOL_CALLING + default_model_kwargs = { + "max_new_tokens": 1024, + "timeout": 60, + "temperature": 0.1, + } + DATE_PROMPT = "当前日期: %Y-%m-%d" + + @staticmethod + def convert_openai_function_to_glm(tools:list[dict]): + glm_tools = [] + for tool_def in tools: + tool_name = tool_def['name'] + description = tool_def['description'] + params = [] + required = tool_def['parameters'].get("required",[]) + for param_name,param in tool_def['parameters'].get('properties',{}).items(): + params.append({ + "name": param_name, + "description": param["description"], + "type": param["type"], + "required": param_name in required, + }) + glm_tools.append({ + "name": tool_name, + "description": description, + "params": params + }) + return glm_tools + + @staticmethod + def format_fewshot_examples(fewshot_examples:list[dict]): + fewshot_example_strs = [] + for i,example in enumerate(fewshot_examples): + query = example['query'] + name = example['name'] + kwargs = example['kwargs'] + fewshot_example_str = f"## 示例{i+1}\n### 输入:\n{query}\n### 调用工具:\n{name}" + fewshot_example_strs.append(fewshot_example_str) + return "\n\n".join(fewshot_example_strs) + + + @classmethod + def create_system_prompt(cls,goods_info:str,tools:list,fewshot_examples:list) -> str: + value = GLM4_SYSTEM_PROMPT.format( + goods_info=goods_info, + date_prompt=datetime.now().strftime(cls.DATE_PROMPT) + ) + if tools: + value += 
"\n\n# 可用工具" + contents = [] + for tool in tools: + content = f"\n\n## {tool['name']}\n\n{json.dumps(tool, ensure_ascii=False,indent=4)}" + content += "\n在调用上述函数时,请使用 Json 格式表示调用的参数。" + contents.append(content) + value += "".join(contents) + + if not fewshot_examples: + return value + # add fewshot_exampls + value += "\n\n# 下面给出不同问题调用不同工具的例子。" + value += f"\n\n{cls.format_fewshot_examples(fewshot_examples)}" + value += "\n\n请参考上述例子进行工具调用。" + return value + + @classmethod + def create_chat_history(cls,x,system_prompt=None): + _chat_history = x['chat_history'] + \ + [{"role":MessageType.HUMAN_MESSAGE_TYPE,"content": x['query']}] + \ + x['agent_tool_history'] + + chat_history = [] + for message in _chat_history: + new_message = message + if message['role'] == MessageType.AI_MESSAGE_TYPE: + new_message = { + **message + } + tool_calls = message.get('additional_kwargs',{}).get("tool_calls",[]) + if tool_calls: + new_message['metadata'] = tool_calls[0]['name'] + chat_history.append(new_message) + chat_history = [{"role": "system", "content": system_prompt}] + chat_history + return chat_history + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + tools:list = kwargs.get('tools',[]) + fewshot_examples = kwargs.get('fewshot_examples',[]) + glm_tools = cls.convert_openai_function_to_glm(tools) + system_prompt = cls.create_system_prompt( + goods_info=kwargs['goods_info'], + tools=glm_tools, + fewshot_examples=fewshot_examples + ) + kwargs['system_prompt'] = system_prompt + return super().create_chain(model_kwargs=model_kwargs,**kwargs) + + +from ..chat_chain import Qwen2Instruct7BChatChain + + + +class Qwen2Instruct72BRetailToolCallingChain(Qwen2Instruct7BChatChain): + model_id = LLMModelType.QWEN2INSTRUCT72B + intent_type = LLMTaskType.RETAIL_TOOL_CALLING + default_model_kwargs = { + "max_tokens": 1024, + "temperature": 0.1, + } + + DATE_PROMPT = "当前日期: %Y-%m-%d 。" + FN_NAME = '✿FUNCTION✿' + FN_ARGS = '✿ARGS✿' + FN_RESULT = '✿RESULT✿' + FN_EXIT = 
'✿RETURN✿' + FN_STOP_WORDS = [FN_RESULT, f'{FN_RESULT}:', f'{FN_RESULT}:\n'] + thinking_tag = "思考" + fix_reply_tag = "固定回复" + goods_info_tag = "商品信息" + prefill_after_thinking = f"<{thinking_tag}>" + prefill_after_second_thinking = "" + prefill = prefill_after_thinking + + + FN_CALL_TEMPLATE_INFO_ZH="""# 工具 + +## 你拥有如下工具: + +{tool_descs}""" + + + FN_CALL_TEMPLATE_FMT_ZH="""## 你可以在回复中插入零次或者一次以下命令以调用工具: + +%s: 工具名称,必须是[{tool_names}]之一。 +%s: 工具输入 +%s: 工具结果 +%s: 根据工具结果进行回复""" % ( + FN_NAME, + FN_ARGS, + FN_RESULT, + FN_EXIT, +) + TOOL_DESC_TEMPLATE="""### {name_for_human}\n\n{name_for_model}: {description_for_model} 输入参数:{parameters} {args_format}""" + + FN_CALL_TEMPLATE=FN_CALL_TEMPLATE_INFO_ZH + '\n\n' + FN_CALL_TEMPLATE_FMT_ZH + +# SYSTEM_PROMPT=f"""你是安踏天猫的客服助理小安, 主要职责是处理用户售前和售后的问题。{{date_prompt}} + +# {{tools}} +# {{fewshot_examples}} + +# ## 当前用户正在浏览的商品信息 +# {{goods_info}} + +# # 思考 +# 你每次给出最终回复前都要按照下面的步骤输出你的思考过程, 注意你并不需要每次都进行所有步骤的思考。并将思考过程写在 XML 标签 <{thinking_tag}> 和 中: +# Step 1. 根据各个工具的描述,分析当前用户的回复和各个示例中的Input相关性,如果跟某个示例对应的Input相关性强,直接跳过后续所有步骤,之后按照示例中Output的工具名称进行调用。 +# Step 2. 如果你觉得可以依据商品信息 <{goods_info_tag}> 里面的内容进行回答,就直接就回答,不需要调用任何工具。并结束思考。 +# Step 3. 如果你觉得当前用户的回复意图不清晰,或者仅仅是表达一些肯定的内容,或者和历史消息没有很强的相关性,同时当前不是第一轮对话,直接回复用户下面 XML 标签 <{fix_reply_tag}> 里面的内容: +# <{fix_reply_tag}> 亲亲,请问还有什么问题吗? +# Step 4. 如果需要调用某个工具,检查该工具的必选参数是否可以在上下文中找到。结束思考,输出结束思考符号。 + +# ## 回答规范 +# - 如果客户没有明确指出在哪里购买的商品,则默认都是在天猫平台购买的 +# - 当前主要服务天猫平台的客户,如果客户询问其他平台的问题,直接回复 “不好意思,亲亲,这里是天猫店铺,只能为您解答天猫的问题。建议您联系其他平台的客服或售后人员给您提供相关的帮助和支持。谢谢!” +# - 如果客户的回复里面包含订单号,则直接回复 ”您好,亲亲,这就帮您去查相关订单信息。请问还有什么问题吗?“ +# - 只能思考一次,在结束思考符号“”之后给出最终的回复。不要重复输出文本,段落,句子。思考之后的文本保持简洁,有且仅能包含一句话。{{non_ask_rules}}""" +# SYSTEM_PROMPT=f"""你是安踏天猫的客服助理小安, 主要职责是处理用户售前和售后的问题。{{date_prompt}} + +# {{tools}} +# {{fewshot_examples}} + +# ## 当前用户正在浏览的商品信息 +# {{goods_info}} + +# # 你每次给出最终回复前都要参考下面的回复策略: +# 1. 根据各个工具的描述,分析当前用户的回复和各个示例中的Input相关性,如果跟某个示例对应的Input相关性强,直接跳过后续所有步骤,之后按照示例中Output的工具名称进行调用。 +# 2. 
如果你觉得可以依据商品信息 <{goods_info_tag}> 里面的内容进行回答,就直接就回答,不需要调用任何工具。 +# 3. 如果你觉得当前用户的回复意图不清晰,或者仅仅是表达一些肯定的内容,或者和历史消息没有很强的相关性,同时当前不是第一轮对话,直接回复用户下面 XML 标签 <{fix_reply_tag}> 里面的内容: +# <{fix_reply_tag}> 亲亲,请问还有什么问题吗? +# 4. 如果需要调用某个工具,检查该工具的必选参数是否可以在上下文中找到。 + +# ## 回答规范 +# - 如果客户没有明确指出在哪里购买的商品,则默认都是在天猫平台购买的 +# - 当前主要服务天猫平台的客户,如果客户询问其他平台的问题,直接回复 “不好意思,亲亲,这里是天猫店铺,只能为您解答天猫的问题。建议您联系其他平台的客服或售后人员给您提供相关的帮助和支持。谢谢!“ +# - 如果客户的回复里面包含订单号,则直接回复 “您好,亲亲,这就帮您去查相关订单信息。请问还有什么问题吗?“{{non_ask_rules}}""" + + SYSTEM_PROMPT=f"""你是安踏天猫的客服助理小安, 主要职责是处理用户售前和售后的问题。{{date_prompt}} + +{{tools}} +{{fewshot_examples}} + +## 当前用户正在浏览的商品信息 +{{goods_info}} + +# 回复策略 +在你给出最终回复前可以在XML标签 <{thinking_tag}> 和 中输出你的回复策略。下面是一些常见的回复策略: + - 如果根据各个工具的描述,当前用户的回复跟某个示例对应的Input相关性强,直接按照示例中Output的工具名称进行调用。 + - 考虑使用商品信息 <{goods_info_tag}> 里面的内容回答用户的问题。 + - 如果你觉得当前用户的回复意图不清晰,或者仅仅是表达一些肯定的内容,或者和历史消息没有很强的相关性,同时当前不是第一轮对话,直接回复用户: “ 亲亲,请问还有什么问题吗?“ + - 如果需要调用某个工具,检查该工具的必选参数是否可以在上下文中找到。 + - 如果客户的回复里面包含订单号,则直接回复 “您好,亲亲,这就帮您去查相关订单信息。请问还有什么问题吗?“ + - 当前主要服务天猫平台的客户,如果客户询问其他平台的问题,直接回复 “不好意思,亲亲,这里是天猫店铺,只能为您解答天猫的问题。建议您联系其他平台的客服或售后人员给您提供相关的帮助和支持。谢谢!“ + +## Tips + - 如果客户没有明确指出在哪里购买的商品,则默认都是在天猫平台购买的。 + - 回答必须简洁,不允许出现超过2句话的回复。{{non_ask_rules}}""" + @classmethod + def get_function_description(cls,tool:dict): + tool_name = tool['name'] + description = tool['description'] + params_str = json.dumps(tool.get('parameters',{}),ensure_ascii=False) + args_format = '此工具的输入应为JSON对象。' + return cls.TOOL_DESC_TEMPLATE.format( + name_for_human=tool_name, + name_for_model=tool_name, + description_for_model=description, + parameters=params_str, + args_format=args_format + ).rstrip() + + + @classmethod + def format_fewshot_examples(cls,fewshot_examples:list[dict]): + fewshot_example_strs = [] + for i,example in enumerate(fewshot_examples): + query = example['query'] + name = example['name'] + kwargs = example['kwargs'] + fewshot_example_str = f"""## 工具调用例子{i+1}\nInput:\n{query}\nOutput:\n{cls.FN_NAME}: {name}\n{cls.FN_ARGS}: 
{json.dumps(kwargs,ensure_ascii=False)}\n{cls.FN_RESULT}""" + fewshot_example_strs.append(fewshot_example_str) + return "\n\n".join(fewshot_example_strs) + + + @classmethod + def create_system_prompt(cls,goods_info:str,tools:list[dict],fewshot_examples:list,create_time=None) -> str: + tool_descs = '\n\n'.join(cls.get_function_description(tool) for tool in tools) + tool_names = ','.join(tool['name'] for tool in tools) + tool_system = cls.FN_CALL_TEMPLATE.format( + tool_descs=tool_descs, + tool_names=tool_names + ) + fewshot_examples_str = "" + if fewshot_examples: + fewshot_examples_str = "\n\n# 下面给出不同客户回复下调用不同工具的例子。" + fewshot_examples_str += f"\n\n{cls.format_fewshot_examples(fewshot_examples)}" + fewshot_examples_str += "\n\n请参考上述例子进行工具调用。" + + non_ask_tool_list = [] + # for tool in tools: + # should_ask_parameter = get_tool_by_name(tool['name']).should_ask_parameter + # if should_ask_parameter != "True": + # format_string = tool['name']+"工具"+should_ask_parameter + # non_ask_tool_list.append(format_string) + if len(non_ask_tool_list) == 0: + non_ask_rules = "" + else: + non_ask_rules = "\n - " + ','.join(non_ask_tool_list) + + if create_time: + datetime_object = datetime.strptime(create_time, '%Y-%m-%d %H:%M:%S.%f') + else: + datetime_object = datetime.now() + logger.info(f"create_time: {create_time} is not valid, use current time instead.") + return cls.SYSTEM_PROMPT.format( + goods_info=goods_info, + tools=tool_system, + fewshot_examples=fewshot_examples_str, + non_ask_rules=non_ask_rules, + date_prompt=datetime_object.strftime(cls.DATE_PROMPT) + ) + + @classmethod + def create_chat_history(cls,x,system_prompt=None): + # deal with function + _chat_history = x['chat_history'] + \ + [{"role": MessageType.HUMAN_MESSAGE_TYPE,"content": x['query']}] + \ + x['agent_tool_history'] + + # print(f'chat_history_before create: {_chat_history}') + # merge chat_history + chat_history = [] + if system_prompt is not None: + chat_history.append({ + "role": 
MessageType.SYSTEM_MESSAGE_TYPE, + "content":system_prompt + }) + + # move tool call results to assistant + for i,message in enumerate(copy.deepcopy(_chat_history)): + role = message['role'] + if i==0: + assert role == MessageType.HUMAN_MESSAGE_TYPE, f"The first message should comes from human role" + + if role == MessageType.TOOL_MESSAGE_TYPE: + assert chat_history[-1]['role'] == MessageType.AI_MESSAGE_TYPE,_chat_history + chat_history[-1]['content'] += message['content'] + continue + elif role == MessageType.AI_MESSAGE_TYPE: + # continue ai message + if chat_history[-1]['role'] == MessageType.AI_MESSAGE_TYPE: + chat_history[-1]['content'] += message['content'] + continue + + chat_history.append(message) + + # move the last tool call message to user + if chat_history[-1]['role'] == MessageType.AI_MESSAGE_TYPE: + assert chat_history[-2]['role'] == MessageType.HUMAN_MESSAGE_TYPE,chat_history + tool_calls = chat_history[-1].get("additional_kwargs",{}).get("tool_calls",[]) + if tool_calls: + chat_history[-2]['content'] += ("\n\n" + chat_history[-1]['content']) + chat_history = chat_history[:-1] + + return chat_history + + + @classmethod + def parse_function_calls_from_ai_message(cls,message:dict): + stop_reason = message['stop_reason'] + content = f"{cls.prefill}" + message['text'] + content = content.strip() + stop_reason = stop_reason or "" + + + function_calls = re.findall(f"{cls.FN_NAME}.*?{cls.FN_RESULT}", content + stop_reason,re.S) + return { + "function_calls":function_calls, + "content":content + } + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + tools:list = kwargs.get('tools',[]) + # add extral tools + if "give_rhetorical_question" not in tools: + tools.append(get_tool_by_name("give_rhetorical_question",scene=SceneType.RETAIL).tool_def) + fewshot_examples = kwargs.get('fewshot_examples',[]) + system_prompt = cls.create_system_prompt( + goods_info=kwargs['goods_info'], + create_time=kwargs.get('create_time',None), + tools=tools, + 
fewshot_examples=fewshot_examples + ) + + agent_current_call_number = kwargs['agent_current_call_number'] + + # give different prefill + if agent_current_call_number == 0: + cls.prefill = cls.prefill_after_thinking + else: + cls.prefill = cls.prefill_after_second_thinking + + # cls.prefill = '' + + model_kwargs = model_kwargs or {} + kwargs['system_prompt'] = system_prompt + model_kwargs = {**model_kwargs} + # model_kwargs["stop"] = model_kwargs.get("stop",[]) + ['✿RESULT✿', '✿RESULT✿:', '✿RESULT✿:\n','✿RETURN✿',f'<{cls.thinking_tag}>',f'<{cls.thinking_tag}/>'] + model_kwargs["stop"] = model_kwargs.get("stop",[]) + ['✿RESULT✿', '✿RESULT✿:', '✿RESULT✿:\n','✿RETURN✿',f'<{cls.thinking_tag}/>'] + # model_kwargs["prefill"] = "我先看看调用哪个工具,下面是我的思考过程:\n\nstep 1." + if "prefill" not in model_kwargs: + model_kwargs["prefill"] = f'{cls.prefill}' + return super().create_chain(model_kwargs=model_kwargs,**kwargs) + + +class Qwen2Instruct7BRetailToolCallingChain(Qwen2Instruct72BRetailToolCallingChain): + model_id = LLMModelType.QWEN2INSTRUCT7B + goods_info_tag = "商品信息" + SYSTEM_PROMPT=f"""你是安踏天猫的客服助理小安, 主要职责是处理用户售前和售后的问题。{{date_prompt}} + +{{tools}} +{{fewshot_examples}} + +## 当前用户正在浏览的商品信息 +{{goods_info}} + +# 回复策略 +下面是一些常见的回复策略: + - 如果根据各个工具的描述,当前用户的回复跟某个示例对应的Input相关性强,直接按照示例中Output的工具名称进行调用。 + - 考虑使用商品信息 <{goods_info_tag}> 里面的内容回答用户的问题。 + - 如果你觉得当前用户的回复意图不清晰,或者仅仅是表达一些肯定的内容,或者和历史消息没有很强的相关性,同时当前不是第一轮对话,直接回复用户: “ 亲亲,请问还有什么问题吗?“ + - 如果需要调用某个工具,检查该工具的必选参数是否可以在上下文中找到。 + - 如果客户的回复里面包含订单号,则直接回复 “您好,亲亲,这就帮您去查相关订单信息。请问还有什么问题吗?“ + - 当前主要服务天猫平台的客户,如果客户询问其他平台的问题,直接回复 “不好意思,亲亲,这里是天猫店铺,只能为您解答天猫的问题。建议您联系其他平台的客服或售后人员给您提供相关的帮助和支持。谢谢!“ + +## Tips + - 如果客户没有明确指出在哪里购买的商品,则默认都是在天猫平台购买的。 + - 回答必须简洁,不允许出现超过2句话的回复。{{non_ask_rules}}""" + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + model_kwargs["prefill"] = "" + res = super().create_chain(model_kwargs=model_kwargs,**kwargs) + cls.prefill = "" + return res + +class 
Qwen15Instruct32BRetailToolCallingChain(Qwen2Instruct7BRetailToolCallingChain): + model_id = LLMModelType.QWEN15INSTRUCT32B + + + + diff --git a/source/lambda/online/langchain_integration/chains/stepback_chain.py b/source/lambda/online/langchain_integration/chains/stepback_chain.py new file mode 100644 index 000000000..4a14db1d1 --- /dev/null +++ b/source/lambda/online/langchain_integration/chains/stepback_chain.py @@ -0,0 +1,138 @@ +from langchain.prompts import ( + ChatPromptTemplate, + FewShotChatMessagePromptTemplate, +) +from langchain.schema.runnable import RunnableLambda + +from common_logic.common_utils.constant import ( + LLMTaskType, + LLMModelType +) +from ..chains.chat_chain import Iternlm2Chat7BChatChain +from ..chains.llm_chain_base import LLMChain +from ..chat_models import Model + +STEPBACK_PROMPTING_TYPE = LLMTaskType.STEPBACK_PROMPTING_TYPE + +class Iternlm2Chat7BStepBackChain(Iternlm2Chat7BChatChain): + model_id = LLMModelType.INTERNLM2_CHAT_7B + intent_type = STEPBACK_PROMPTING_TYPE + + default_model_kwargs = {"temperature": 0.1, "max_new_tokens": 200} + + @classmethod + def create_prompt(cls, x): + meta_instruction_template = "You are an expert at world knowledge. Your task is to step back and paraphrase a question to a more generic step-back question, which is easier to answer. Here are a few examples: {few_examples}" + # meta_instruction_template = "你是一个拥有世界知识的专家. 
你的任务是将问题转述为更通用的问题,这样更容易回答。更通用指的是将问题进行抽象表达,省略问题中的各种细节,包括具体时间,地点等。 下面有一些例子: {few_examples}" + + few_examples = [ + { + "input": "阿尔伯特-爱因斯坦的出生地是哪里?", + "output": "阿尔伯特-爱因斯坦的个人经历是怎样的?", + }, + { + "input": "特斯拉在中国上海有多少门店", + "output": "特斯拉在中国的门店分布情况", + }, + ] + + few_examples_template = """origin question: {origin_question} + step-back question: {step_back_question} + """ + few_examples_strs = [] + for few_example in few_examples: + few_examples_strs.append( + few_examples_template.format( + origin_question=few_example["input"], + step_back_question=few_example["output"], + ) + ) + meta_instruction = meta_instruction_template.format( + few_examples="\n\n".join(few_examples_strs) + ) + prompt = ( + cls.build_prompt( + query=f"origin question: {x['query']}", + history=[], + meta_instruction=meta_instruction, + ) + + "step-back question: " + ) + return prompt + + +class Iternlm2Chat20BStepBackChain(Iternlm2Chat7BStepBackChain): + model_id = LLMModelType.INTERNLM2_CHAT_20B + intent_type = STEPBACK_PROMPTING_TYPE + + +class Claude2StepBackChain(LLMChain): + model_id = LLMModelType.CLAUDE_2 + intent_type = STEPBACK_PROMPTING_TYPE + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + stream = kwargs.get("stream", False) + examples = [ + { + "input": "Could the members of The Police perform lawful arrests?", + "output": "what can the members of The Police do?", + }, + { + "input": "Jan Sindel’s was born in what country?", + "output": "what is Jan Sindel’s personal history?", + }, + ] + # We now transform these to example messages + example_prompt = ChatPromptTemplate.from_messages( + [ + ("human", "{input}"), + ("ai", "{output}"), + ] + ) + few_shot_prompt = FewShotChatMessagePromptTemplate( + example_prompt=example_prompt, + examples=examples, + ) + + prompt = ChatPromptTemplate.from_messages( + [ + ( + "system", + """You are an expert at world knowledge. 
Your task is to step back and paraphrase a question to a more generic step-back question, which is easier to answer. Here are a few examples:""", + ), + # Few shot examples + few_shot_prompt, + # New question + ("user", "{query}"), + ] + ) + + llm = Model.get_model(cls.model_id, model_kwargs=model_kwargs, **kwargs) + chain = prompt | llm + if stream: + chain = ( + prompt + | RunnableLambda(lambda x: llm.stream(x.messages)) + | RunnableLambda(lambda x: (i.content for i in x)) + ) + + else: + chain = prompt | llm | RunnableLambda(lambda x: x.content) + return chain + + +class Claude21StepBackChain(Claude2StepBackChain): + model_id = LLMModelType.CLAUDE_21 + + +class ClaudeInstanceStepBackChain(Claude2StepBackChain): + model_id = LLMModelType.CLAUDE_INSTANCE + + +class Claude3SonnetStepBackChain(Claude2StepBackChain): + model_id = LLMModelType.CLAUDE_3_SONNET + + +class Claude3HaikuStepBackChain(Claude2StepBackChain): + model_id = LLMModelType.CLAUDE_3_HAIKU diff --git a/source/lambda/online/langchain_integration/chains/tool_calling_chain_claude_xml.py b/source/lambda/online/langchain_integration/chains/tool_calling_chain_claude_xml.py new file mode 100644 index 000000000..9d6b84b38 --- /dev/null +++ b/source/lambda/online/langchain_integration/chains/tool_calling_chain_claude_xml.py @@ -0,0 +1,320 @@ +# tool calling chain +import json +from typing import List,Dict,Any +import re + +from langchain.schema.runnable import ( + RunnableLambda, + RunnablePassthrough +) +from common_logic.common_utils.prompt_utils import get_prompt_template +from common_logic.common_utils.logger_utils import print_llm_messages +from langchain_core.messages import( + AIMessage, + SystemMessage +) +from langchain.prompts import ChatPromptTemplate + +from langchain_core.messages import AIMessage,SystemMessage + +from common_logic.common_utils.constant import ( + LLMTaskType, + LLMModelType, + MessageType +) +from common_logic.common_utils.time_utils import get_china_now + +from .llm_chain_base 
import LLMChain +from ..llm_models import Model + +incorrect_tool_call_example = """Here is an example of an incorrectly formatted tool call, which you should avoid. + + + +tool_name + + +question +string +value + + + + + + +In this incorrect tool calling example, the parameter `name` should form a XLM tag. +""" + + +SYSTEM_MESSAGE_PROMPT =(f"In this environment you have access to a set of tools you can use to answer the user's question.\n" + "\n" + "You may call them like this:\n" + "\n" + "\n" + "$TOOL_NAME\n" + "\n" + "<$PARAMETER_NAME>$PARAMETER_VALUE\n" + "...\n" + "\n" + "\n" + "\n" + "\n" + "Here are the tools available:\n" + "\n" + "{tools}" + "\n" + "\nAnswer the user's request using relevant tools (if they are available). Before calling a tool, do some analysis within tags. First, think about which of the provided tools is the relevant tool to answer the user's request. Second, go through each of the required parameters of the relevant tool and determine if the user has directly provided or given enough information to infer a value. When deciding if the parameter can be inferred, carefully consider all the context to see if it supports a specific value. If all of the required parameters are present or can be reasonably inferred, close the thinking tag and proceed with the tool call. BUT, if one of the values for a required parameter is missing, DO NOT invoke the function (not even with fillers for the missing params) and instead, ask the user to provide the missing parameters. DO NOT ask for more information on optional parameters if it is not provided." + "\nHere are some guidelines for you:\n{tool_call_guidelines}." + f"\n{incorrect_tool_call_example}" + ) + +SYSTEM_MESSAGE_PROMPT_WITH_FEWSHOT_EXAMPLES = SYSTEM_MESSAGE_PROMPT + ( + "Some examples of tool calls are given below, where the content within represents the most recent reply in the dialog." 
+ "\n{fewshot_examples}" +) + +TOOL_FORMAT = """ +{tool_name} +{tool_description} + +{formatted_required_parameters} + + +{formatted_optional_parameters} + +""" + +TOOL_PARAMETER_FORMAT = """ +{parameter_name} +{parameter_type} +{parameter_description} +""" + +TOOL_EXECUTE_SUCCESS_TEMPLATE = """ + + +{tool_name} + +{result} + + + +""" + +TOOL_EXECUTE_FAIL_TEMPLATE = """ + + +{error} + + +""" + +AGENT_SYSTEM_PROMPT = "你是一个亚马逊云科技的AI助理,你的名字是亚麻小Q。今天是{date_str},{weekday}. " + + +def _get_type(parameter: Dict[str, Any]) -> str: + if "type" in parameter: + return parameter["type"] + if "anyOf" in parameter: + return json.dumps({"anyOf": parameter["anyOf"]}) + if "allOf" in parameter: + return json.dumps({"allOf": parameter["allOf"]}) + return json.dumps(parameter) + + +def convert_openai_tool_to_anthropic(tools:list[dict])->str: + formatted_tools = tools + tools_data = [ + { + "tool_name": tool["name"], + "tool_description": tool["description"], + "formatted_required_parameters": "\n".join( + [ + TOOL_PARAMETER_FORMAT.format( + parameter_name=name, + parameter_type=_get_type(parameter), + parameter_description=parameter.get("description"), + ) for name, parameter in tool["parameters"]["properties"].items() + if name in tool["parameters"].get("required", []) + ] + ), + "formatted_optional_parameters": "\n".join( + [ + TOOL_PARAMETER_FORMAT.format( + parameter_name=name, + parameter_type=_get_type(parameter), + parameter_description=parameter.get("description"), + ) for name, parameter in tool["parameters"]["properties"].items() + if name not in tool["parameters"].get("required", []) + ] + ), + } + for tool in formatted_tools + ] + tools_formatted = "\n".join( + [ + TOOL_FORMAT.format( + tool_name=tool["tool_name"], + tool_description=tool["tool_description"], + formatted_required_parameters=tool["formatted_required_parameters"], + formatted_optional_parameters=tool["formatted_optional_parameters"], + ) + for tool in tools_data + ] + ) + return tools_formatted + + +class 
Claude2ToolCallingChain(LLMChain): + model_id = LLMModelType.CLAUDE_2 + intent_type = LLMTaskType.TOOL_CALLING_XML + default_model_kwargs = { + "max_tokens": 2000, + "temperature": 0.1, + "top_p": 0.9, + "stop_sequences": ["\n\nHuman:", "\n\nAssistant",""], + } + + @staticmethod + def format_fewshot_examples(fewshot_examples:list[dict]): + fewshot_example_strs = [] + for fewshot_example in fewshot_examples: + param_strs = [] + for p,v in fewshot_example['kwargs'].items(): + param_strs.append(f"<{p}>{v}\n" + f"{fewshot_example['query']}\n" + f"\n" + "\n" + "\n" + f"{fewshot_example['name']}\n" + "\n" + f"{param_str}" + "\n" + "\n" + "\n" + "\n" + "" + ) + fewshot_example_strs.append(fewshot_example_str) + fewshot_example_str = '\n'.join(fewshot_example_strs) + return f"\n{fewshot_example_str}\n" + + @classmethod + def parse_function_calls_from_ai_message(cls,message:AIMessage): + content = "" + message.content + "" + function_calls:List[str] = re.findall("(.*?)", content,re.S) + if not function_calls: + content = "" + message.content + + return { + "function_calls": function_calls, + "content": content + } + + @classmethod + def create_chat_history(cls,x): + chat_history = x['chat_history'] + \ + [{"role": MessageType.HUMAN_MESSAGE_TYPE,"content": x['query']}] + \ + x['agent_tool_history'] + return chat_history + + @classmethod + def get_common_system_prompt(cls,system_prompt_template:str): + now = get_china_now() + date_str = now.strftime("%Y年%m月%d日") + weekdays = ['星期一', '星期二', '星期三', '星期四', '星期五', '星期六', '星期日'] + weekday = weekdays[now.weekday()] + system_prompt = system_prompt_template.format(date=date_str,weekday=weekday) + return system_prompt + + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + model_kwargs = model_kwargs or {} + tools:list = kwargs['tools'] + fewshot_examples = kwargs.get('fewshot_examples',[]) + if fewshot_examples: + fewshot_examples.append({ + "name": "give_rhetorical_question", + "query": "今天天气怎么样?", + "kwargs": 
{"question": "请问你想了解哪个城市的天气?"} + }) + user_system_prompt = get_prompt_template( + model_id=cls.model_id, + task_type=cls.intent_type, + prompt_name="user_prompt" + ).prompt_template + + user_system_prompt = kwargs.get("user_prompt",None) or user_system_prompt + + user_system_prompt = cls.get_common_system_prompt( + user_system_prompt + ) + guidelines_prompt = get_prompt_template( + model_id=cls.model_id, + task_type=cls.intent_type, + prompt_name="guidelines_prompt" + ).prompt_template + + guidelines_prompt = kwargs.get("guidelines_prompt",None) or guidelines_prompt + model_kwargs = {**cls.default_model_kwargs, **model_kwargs} + + tools_formatted = convert_openai_tool_to_anthropic(tools) + + if fewshot_examples: + system_prompt = SYSTEM_MESSAGE_PROMPT_WITH_FEWSHOT_EXAMPLES.format( + tools=tools_formatted, + fewshot_examples=cls.format_fewshot_examples(fewshot_examples), + tool_call_guidelines=guidelines_prompt + ) + else: + system_prompt = SYSTEM_MESSAGE_PROMPT.format( + tools=tools_formatted, + tool_call_guidelines=guidelines_prompt + ) + + system_prompt = user_system_prompt + system_prompt + tool_calling_template = ChatPromptTemplate.from_messages( + [ + SystemMessage(content=system_prompt), + ("placeholder", "{chat_history}"), + AIMessage(content="") + ]) + + llm = Model.get_model( + model_id=cls.model_id, + model_kwargs=model_kwargs, + ) + chain = RunnablePassthrough.assign(chat_history=lambda x: cls.create_chat_history(x)) | tool_calling_template \ + | RunnableLambda(lambda x: print_llm_messages(f"Agent messages: {x.messages}") or x.messages ) \ + | llm | RunnableLambda(lambda message:cls.parse_function_calls_from_ai_message( + message + )) + return chain + + +class Claude21ToolCallingChain(Claude2ToolCallingChain): + model_id = LLMModelType.CLAUDE_21 + + +class ClaudeInstanceToolCallingChain(Claude2ToolCallingChain): + model_id = LLMModelType.CLAUDE_INSTANCE + + +class Claude3SonnetToolCallingChain(Claude2ToolCallingChain): + model_id = 
LLMModelType.CLAUDE_3_SONNET + + +class Claude3HaikuToolCallingChain(Claude2ToolCallingChain): + model_id = LLMModelType.CLAUDE_3_HAIKU + + +class Claude35SonnetToolCallingChain(Claude2ToolCallingChain): + model_id = "anthropic.claude-3-5-sonnet-20240620-v1:0" diff --git a/source/lambda/online/langchain_integration/chains/translate_chain.py b/source/lambda/online/langchain_integration/chains/translate_chain.py new file mode 100644 index 000000000..07b92b3bb --- /dev/null +++ b/source/lambda/online/langchain_integration/chains/translate_chain.py @@ -0,0 +1,40 @@ +# translate chain +from langchain.schema.runnable import RunnableLambda + +from common_logic.common_utils.constant import ( + LLMTaskType, + LLMModelType +) +from .chat_chain import Iternlm2Chat7BChatChain + +QUERY_TRANSLATE_TYPE = LLMTaskType.QUERY_TRANSLATE_TYPE + + +class Iternlm2Chat7BTranslateChain(Iternlm2Chat7BChatChain): + intent_type = QUERY_TRANSLATE_TYPE + default_model_kwargs = {"temperature": 0.1, "max_new_tokens": 200} + + @classmethod + def create_prompt(cls, x): + query = x["query"] + target_lang = x["target_lang"] + history = cls.create_history(x) + meta_instruction = f"你是一个有经验的翻译助理, 正在将用户的问题翻译成{target_lang},不要试图去回答用户的问题,仅仅做翻译。" + query = f'将文本:\n "{query}" \n 翻译成{target_lang}。\n直接翻译文本,不要输出多余的文本。' + + prompt = cls.build_prompt( + query=query, history=history, meta_instruction=meta_instruction + ) + return prompt + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + model_kwargs = model_kwargs or {} + model_kwargs = {**cls.default_model_kwargs, **model_kwargs} + llm_chain = super().create_chain(model_kwargs=model_kwargs, **kwargs) + llm_chain = llm_chain | RunnableLambda(lambda x: x.strip('"')) # postprocess + return llm_chain + + +class Iternlm2Chat20BTranslateChain(Iternlm2Chat7BTranslateChain): + model_id = LLMModelType.INTERNLM2_CHAT_20B diff --git a/source/lambda/online/langchain_integration/chat_models/__init__.py 
b/source/lambda/online/langchain_integration/chat_models/__init__.py new file mode 100644 index 000000000..8b9092ec4 --- /dev/null +++ b/source/lambda/online/langchain_integration/chat_models/__init__.py @@ -0,0 +1,97 @@ +""" +chat models build in command pattern +""" +from common_logic.common_utils.constant import LLMModelType + + +class ModeMixins: + @staticmethod + def convert_messages_role(messages:list[dict],role_map:dict): + """ + Args: + messages (list[dict]): + role_map (dict): {"current_role":"targe_role"} + + Returns: + _type_: as messages + """ + valid_roles = list(role_map.keys()) + new_messages = [] + for message in messages: + message = {**message} + role = message['role'] + assert role in valid_roles,(role,valid_roles,messages) + message['role'] = role_map[role] + new_messages.append(message) + return new_messages + + +class ModelMeta(type): + def __new__(cls, name, bases, attrs): + new_cls = type.__new__(cls, name, bases, attrs) + if name == "Model" or new_cls.model_id is None: + return new_cls + new_cls.model_map[new_cls.model_id] = new_cls + return new_cls + + +class Model(ModeMixins,metaclass=ModelMeta): + model_id = None + model_map = {} + + @classmethod + def create_model(cls, model_kwargs=None, **kwargs): + raise NotImplementedError + + @classmethod + def get_model(cls, model_id, model_kwargs=None, **kwargs): + # dynamic load module + _load_module(model_id) + return cls.model_map[model_id].create_model(model_kwargs=model_kwargs, **kwargs) + +def _import_bedrock_models(): + from .bedrock_models import ( + Claude2, + ClaudeInstance, + Claude21, + Claude3Sonnet, + Claude3Haiku, + Claude35Sonnet, + MistralLarge2407, + Llama3d1Instruct70B, + CohereCommandRPlus + ) + +def _import_openai_models(): + from .openai_models import ( + ChatGPT35, + ChatGPT4Turbo, + ChatGPT4o + ) + + +def _load_module(model_id): + assert model_id in MODEL_MODULE_LOAD_FN_MAP,(model_id,MODEL_MODULE_LOAD_FN_MAP) + MODEL_MODULE_LOAD_FN_MAP[model_id]() + + 
+MODEL_MODULE_LOAD_FN_MAP = { + LLMModelType.CHATGPT_35_TURBO_0125:_import_openai_models, + LLMModelType.CHATGPT_4_TURBO:_import_openai_models, + LLMModelType.CHATGPT_4O:_import_openai_models, + LLMModelType.CLAUDE_2:_import_bedrock_models, + LLMModelType.CLAUDE_INSTANCE:_import_bedrock_models, + LLMModelType.CLAUDE_21:_import_bedrock_models, + LLMModelType.CLAUDE_3_SONNET:_import_bedrock_models, + LLMModelType.CLAUDE_3_HAIKU:_import_bedrock_models, + LLMModelType.CLAUDE_3_5_SONNET:_import_bedrock_models, + LLMModelType.LLAMA3_1_70B_INSTRUCT:_import_bedrock_models, + LLMModelType.MISTRAL_LARGE_2407:_import_bedrock_models, + LLMModelType.COHERE_COMMAND_R_PLUS:_import_bedrock_models, +} + + + + + + diff --git a/source/lambda/online/langchain_integration/chat_models/bedrock_models.py b/source/lambda/online/langchain_integration/chat_models/bedrock_models.py new file mode 100644 index 000000000..3239e0123 --- /dev/null +++ b/source/lambda/online/langchain_integration/chat_models/bedrock_models.py @@ -0,0 +1,77 @@ +import os +from langchain_aws.chat_models import ChatBedrockConverse +from common_logic.common_utils.constant import ( + MessageType, + LLMModelType +) +from common_logic.common_utils.logger_utils import get_logger +from . 
import Model + + +# AI_MESSAGE_TYPE = MessageType.AI_MESSAGE_TYPE +# HUMAN_MESSAGE_TYPE = MessageType.HUMAN_MESSAGE_TYPE +# SYSTEM_MESSAGE_TYPE = MessageType.SYSTEM_MESSAGE_TYPE + +logger = get_logger("bedrock_model") + +# Bedrock model type +class Claude2(Model): + model_id = LLMModelType.CLAUDE_2 + default_model_kwargs = {"max_tokens": 2000, "temperature": 0.7, "top_p": 0.9} + + @classmethod + def create_model(cls, model_kwargs=None, **kwargs): + model_kwargs = model_kwargs or {} + model_kwargs = {**cls.default_model_kwargs, **model_kwargs} + + credentials_profile_name = ( + kwargs.get("credentials_profile_name", None) + or os.environ.get("AWS_PROFILE", None) + or None + ) + region_name = ( + kwargs.get("region_name", None) + or os.environ.get("BEDROCK_REGION", None) + or None + ) + llm = ChatBedrockConverse( + credentials_profile_name=credentials_profile_name, + region_name=region_name, + model=cls.model_id, + **model_kwargs, + ) + return llm + + +class ClaudeInstance(Claude2): + model_id = LLMModelType.CLAUDE_INSTANCE + + +class Claude21(Claude2): + model_id = LLMModelType.CLAUDE_21 + + +class Claude3Sonnet(Claude2): + model_id = LLMModelType.CLAUDE_3_SONNET + + +class Claude3Haiku(Claude2): + model_id = LLMModelType.CLAUDE_3_HAIKU + + +class Claude35Sonnet(Claude2): + model_id = LLMModelType.CLAUDE_3_5_SONNET + + +class MistralLarge2407(Claude2): + model_id = LLMModelType.MISTRAL_LARGE_2407 + + +class Llama3d1Instruct70B(Claude2): + model_id = LLMModelType.LLAMA3_1_70B_INSTRUCT + +class CohereCommandRPlus(Claude2): + model_id = LLMModelType.COHERE_COMMAND_R_PLUS + + + diff --git a/source/lambda/online/langchain_integration/chat_models/openai_models.py b/source/lambda/online/langchain_integration/chat_models/openai_models.py new file mode 100644 index 000000000..fdddeb454 --- /dev/null +++ b/source/lambda/online/langchain_integration/chat_models/openai_models.py @@ -0,0 +1,28 @@ +from langchain_openai import ChatOpenAI +from common_logic.common_utils.constant 
import LLMModelType +from common_logic.common_utils.logger_utils import get_logger +from . import Model + +logger = get_logger("openai_model") + +class ChatGPT35(Model): + model_id = LLMModelType.CHATGPT_35_TURBO_0125 + default_model_kwargs = {"max_tokens": 2000, "temperature": 0.7, "top_p": 0.9} + + @classmethod + def create_model(cls, model_kwargs=None, **kwargs): + model_kwargs = model_kwargs or {} + model_kwargs = {**cls.default_model_kwargs, **model_kwargs} + llm = ChatOpenAI( + model=cls.model_id, + **model_kwargs, + ) + return llm + + +class ChatGPT4Turbo(ChatGPT35): + model_id = LLMModelType.CHATGPT_4_TURBO + + +class ChatGPT4o(ChatGPT35): + model_id = LLMModelType.CHATGPT_4O \ No newline at end of file From 4f43680fb67fc8d5bf4e468ec701d2e8ce01190f Mon Sep 17 00:00:00 2001 From: Cui <530051970@qq.com> Date: Fri, 18 Oct 2024 09:52:47 +0800 Subject: [PATCH 011/110] update buildspec.yaml --- api_test/buildspec-20241012.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/api_test/buildspec-20241012.yaml b/api_test/buildspec-20241012.yaml index eab92c1b3..12b80fd18 100644 --- a/api_test/buildspec-20241012.yaml +++ b/api_test/buildspec-20241012.yaml @@ -72,6 +72,7 @@ phases: if [ -d "Intelli-Agent/source/infrastructure" ]; then echo "Synthesizing start..." pushd "Intelli-Agent/source/infrastructure" + sed -i 's/support@example.com/$SUB_EMAIL/g' bin/config.ts pnpm i npx cdk synth 2>&1 | tee synth.log if [ ${PIPESTATUS[0]} -ne 0 ]; then @@ -89,7 +90,7 @@ phases: if [ -d "Intelli-Agent/source/infrastructure" ]; then pushd "Intelli-Agent/source/infrastructure" pnpm i - npx cdk deploy + npx cdk deploy $STACK_NAME deploy_exit_code=$? if [ $deploy_exit_code -ne 0 ]; then echo "CDK deployment failed. Sending email and exiting with status code 1." 
From 0e9cc927c82a3d8375aeb45e0f96696035041593 Mon Sep 17 00:00:00 2001 From: zhouxss Date: Fri, 18 Oct 2024 05:54:41 +0000 Subject: [PATCH 012/110] modify --- source/lambda/online/functions/_tool_base.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/source/lambda/online/functions/_tool_base.py b/source/lambda/online/functions/_tool_base.py index 63fca04dc..c4084d2c5 100644 --- a/source/lambda/online/functions/_tool_base.py +++ b/source/lambda/online/functions/_tool_base.py @@ -3,6 +3,7 @@ from enum import Enum from common_logic.common_utils.constant import SceneType,ToolRuningMode + class ToolDefType(Enum): openai = "openai" @@ -19,6 +20,7 @@ class Tool(BaseModel): scene: str = Field(description="tool use scene",default=SceneType.COMMON) # should_ask_parameter: bool = Field(description="tool use scene") + class ToolManager: def __init__(self) -> None: self.tools = {} From 7d34b82b557d22483d49c30bc1159108b8139897 Mon Sep 17 00:00:00 2001 From: zhouxss Date: Fri, 18 Oct 2024 05:55:06 +0000 Subject: [PATCH 013/110] modify --- .../langchain_integration/chat_models/bedrock_models.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/source/lambda/online/langchain_integration/chat_models/bedrock_models.py b/source/lambda/online/langchain_integration/chat_models/bedrock_models.py index 3239e0123..162ab998a 100644 --- a/source/lambda/online/langchain_integration/chat_models/bedrock_models.py +++ b/source/lambda/online/langchain_integration/chat_models/bedrock_models.py @@ -8,10 +8,6 @@ from . 
import Model -# AI_MESSAGE_TYPE = MessageType.AI_MESSAGE_TYPE -# HUMAN_MESSAGE_TYPE = MessageType.HUMAN_MESSAGE_TYPE -# SYSTEM_MESSAGE_TYPE = MessageType.SYSTEM_MESSAGE_TYPE - logger = get_logger("bedrock_model") # Bedrock model type From 7243b3b8a702193b3518562ea1a3fc3bf349ef20 Mon Sep 17 00:00:00 2001 From: NingLyu Date: Tue, 22 Oct 2024 07:10:53 +0000 Subject: [PATCH 014/110] feat: qq match --- .../retriever/utils/aos_retrievers.py | 16 ++++++++---- .../main_utils/online_entries/agent_base.py | 25 ++++++++++++++++--- 2 files changed, 32 insertions(+), 9 deletions(-) diff --git a/source/lambda/online/functions/functions_utils/retriever/utils/aos_retrievers.py b/source/lambda/online/functions/functions_utils/retriever/utils/aos_retrievers.py index a4ca38246..5fb9ff4d5 100644 --- a/source/lambda/online/functions/functions_utils/retriever/utils/aos_retrievers.py +++ b/source/lambda/online/functions/functions_utils/retriever/utils/aos_retrievers.py @@ -430,6 +430,7 @@ def organize_faq_results( result["source"] = metadata[source_field] result["kwargs"] = metadata.get("kwargs", {}) elif "jsonlAnswer" in aos_hit["_source"]["metadata"] and "answer" in aos_hit["_source"]["metadata"]["jsonlAnswer"]: + # Intention result["answer"] = aos_hit["_source"]["metadata"]["jsonlAnswer"]["answer"] result["question"] = aos_hit["_source"]["metadata"]["jsonlAnswer"]["question"] result["content"] = aos_hit["_source"]["text"] @@ -437,17 +438,22 @@ def organize_faq_results( result[source_field] = aos_hit["_source"]["metadata"]["jsonlAnswer"][source_field] else: result[source_field] = aos_hit["_source"]["metadata"]["file_path"] + elif "jsonlAnswer" in aos_hit["_source"]["metadata"] and "answer" not in aos_hit["_source"]["metadata"]["jsonlAnswer"]: + # QQ match + result["answer"] = aos_hit["_source"]["metadata"]["jsonlAnswer"] + result["question"] = aos_hit["_source"]["text"] + result["content"] = aos_hit["_source"]["text"] + result[source_field] = aos_hit["_source"]["metadata"]["file_path"] 
else: result["answer"] = aos_hit["_source"]["metadata"] result["content"] = aos_hit["_source"][text_field] result["question"] = aos_hit["_source"][text_field] result[source_field] = aos_hit["_source"]["metadata"][source_field] - except: - logger.info("index_error") - logger.info(traceback.format_exc()) - logger.info(aos_hit["_source"]) + except Exception as e: + logger.error(e) + logger.error(traceback.format_exc()) + logger.error(aos_hit) continue - # result.update(aos_hit["_source"]) results.append(result) return results diff --git a/source/lambda/online/lambda_main/main_utils/online_entries/agent_base.py b/source/lambda/online/lambda_main/main_utils/online_entries/agent_base.py index 7b76587ad..d1edb4d6e 100644 --- a/source/lambda/online/lambda_main/main_utils/online_entries/agent_base.py +++ b/source/lambda/online/lambda_main/main_utils/online_entries/agent_base.py @@ -30,7 +30,7 @@ def tools_choose_and_results_generation(state): agent_repeated_call_validation = state['agent_current_call_number'] < state['agent_repeated_call_limit'] send_trace( - f"\n\n**agent_current_output:** \n\n{json.dumps(agent_current_output['agent_output'],ensure_ascii=False,indent=2)}\n\n **agent_current_call_number:** {agent_current_call_number}", + f"\n\n**agent_current_output:** \n\n {json.dumps(agent_current_output['agent_output'],ensure_ascii=False,indent=4)} \n\n **agent_current_call_number:** {agent_current_call_number}", state["stream"], state["ws_connection_id"] ) @@ -41,6 +41,20 @@ def tools_choose_and_results_generation(state): } +def format_agent_result_output(data): + markdown_table = "| Tool Name | Model ID | Kwargs |\n" + markdown_table += "|-------|-------|-------|\n" + for item in data: + tool_name = item.get("name", "") + model_id = item.get("model_id", "") + kwargs = ', '.join( + [f'{k}: {v}' for k, v in item.get('kwargs', {}).items()]) + markdown_table += f"| {tool_name} | {model_id} | {kwargs} |\n" + logger.info(markdown_table) + + return markdown_table + + 
@node_monitor_wrapper def results_evaluation(state): # parse tool_calls: @@ -49,7 +63,8 @@ def results_evaluation(state): agent_output=state['agent_current_output'] ) tool_calls = output['tool_calls'] - send_trace(f"\n\n**tool_calls parsed:** \n{tool_calls}", state["stream"], state["ws_connection_id"], state["enable_trace"]) + md_tool_result = format_agent_result_output(tool_calls) + send_trace(f"\n\n**tool_calls parsed:** \n\n {md_tool_result}", state["stream"], state["ws_connection_id"], state["enable_trace"]) if not state["extra_response"].get("current_agent_intent_type", None): state["extra_response"]["current_agent_intent_type"] = output['tool_calls'][0]["name"] @@ -106,8 +121,10 @@ def tool_execution(state): "kwargs": tool_call['kwargs'], "model_id": tool_call['model_id'] }) - - output = format_tool_call_results(tool_call['model_id'],tool_call_results) + + output = format_tool_call_results( + tool_call['model_id'], tool_call_results) + send_trace(f'**tool_execute_res:** \n{output["tool_message"]["content"]}', enable_trace=state["enable_trace"]) return { "agent_tool_history": [output['tool_message']] From 68c1fdf49578bb8050260bcf1743d6da91d56e19 Mon Sep 17 00:00:00 2001 From: Cui <530051970@qq.com> Date: Tue, 22 Oct 2024 16:32:03 +0800 Subject: [PATCH 015/110] update test case --- .../Intelli-Agent-RESTful-API-prod-oas30.json | 1640 +++++- api_test/README.md | 63 + api_test/biz_logic/README.md | 32 - api_test/biz_logic/openapitools.json | 7 - api_test/biz_logic/response.json | 1 - .../rest_api/docs/Aicusapico2TwvXbhsTncy.md | 31 + .../docs/Aicusapico2TwvXbhsTncyConfig.md | 30 + ...md => Aicusapico2TwvXbhsTncyItemsInner.md} | 16 +- .../rest_api/docs/Aicusapico35klzY80ikPh.md | 30 + .../docs/Aicusapico35klzY80ikPhItemsInner.md | 33 + ...sapico35klzY80ikPhItemsInnerQAListInner.md | 31 + .../rest_api/docs/Aicusapico4rwMspzeBOe5.md | 31 + .../Aicusapico4rwMspzeBOe5InputPayload.md | 34 + .../rest_api/docs/Aicusapico51RafCAYOxiZ.md | 30 + 
.../rest_api/docs/AicusapicoCyd129M65yKV.md | 30 + .../rest_api/docs/AicusapicoEOcLNul8cwxa.md | 33 + .../rest_api/docs/AicusapicoQjcoKzzZFI86.md | 30 + .../docs/AicusapicoQjcoKzzZFI86ItemsInner.md | 34 + ...aUMjaXtPx.md => AicusapicoUy1YBXiWJ5Aq.md} | 16 +- .../rest_api/docs/Aicusapicob9jxGQ8zv1AS.md | 31 + .../docs/Aicusapicob9jxGQ8zv1ASItemsInner.md | 38 + .../rest_api/docs/AicusapicobMN2pLK9AvE8.md | 32 + .../docs/AicusapicobMN2pLK9AvE8Index.md | 31 + .../rest_api/docs/Aicusapicoh5w3FRwxBjhG.md | 32 + .../rest_api/docs/AicusapicohQbFv37cvtQS.md | 32 + .../docs/AicusapicohQbFv37cvtQSIndexIds.md | 31 + .../rest_api/docs/AicusapicoiXUam8N8Dh8l.md | 32 + .../docs/AicusapicoiXUam8N8Dh8lItemsInner.md | 32 + .../rest_api/docs/Aicusapicoqew7t5vTA2ak.md | 30 + .../rest_api/docs/Aicusapicor1Kt5C2mLnkm.md | 29 + ...oLf1K1uex.md => AicusapicoseOArXMRpSNs.md} | 16 +- .../biz_logic/rest_api/docs/DefaultApi.md | 1730 +++++- .../rest_api/docs/IntellapicoH4A9yvm8c1p3.md | 29 - .../rest_api/docs/IntellapicoNbA0nyPxxk6q.md | 30 - .../rest_api/docs/IntellapicorVOJKT5wIzUC.md | 31 - .../docs/IntellapicorVOJKT5wIzUCConfig.md | 30 - .../rest_api/docs/Intellapicormo5LBZXS9Rb.md | 30 - .../rest_api/docs/IntellapicowXaFAEWeTgPt.md | 30 - .../docs/IntellapicowXaFAEWeTgPtItemsInner.md | 34 - .../rest_api/openapi_client/__init__.py | 43 +- .../openapi_client/api/default_api.py | 5056 ++++++++++++++++- .../rest_api/openapi_client/api_client.py | 6 +- .../rest_api/openapi_client/configuration.py | 18 +- .../rest_api/openapi_client/exceptions.py | 6 +- .../openapi_client/models/__init__.py | 42 +- ..._iz_uc.py => aicusapico2_twv_xbhs_tncy.py} | 26 +- ...py => aicusapico2_twv_xbhs_tncy_config.py} | 14 +- ... 
aicusapico2_twv_xbhs_tncy_items_inner.py} | 14 +- ...e_tg_pt.py => aicusapico35klz_y80ik_ph.py} | 20 +- .../aicusapico35klz_y80ik_ph_items_inner.py | 103 + ...5klz_y80ik_ph_items_inner_qa_list_inner.py | 91 + .../models/aicusapico4rw_mspze_boe5.py | 95 + .../aicusapico4rw_mspze_boe5_input_payload.py | 97 + ...s9_rb.py => aicusapico51_raf_cay_oxi_z.py} | 14 +- .../models/aicusapico_cyd129_m65y_kv.py | 89 + .../models/aicusapico_eoc_l_nul8cwxa.py | 95 + .../models/aicusapico_qjco_kzz_zfi86.py | 97 + ... aicusapico_qjco_kzz_zfi86_items_inner.py} | 14 +- ...xt_px.py => aicusapico_uy1_ybxi_wj5_aq.py} | 14 +- .../models/aicusapicob9jx_gq8zv1_as.py | 103 + .../aicusapicob9jx_gq8zv1_as_items_inner.py | 105 + .../models/aicusapicob_mn2p_lk9_av_e8.py | 97 + .../aicusapicob_mn2p_lk9_av_e8_index.py | 91 + .../models/aicusapicoh5w3_f_rwx_bjh_g.py | 93 + .../models/aicusapicoh_qb_fv37cvt_qs.py | 97 + .../aicusapicoh_qb_fv37cvt_qs_index_ids.py | 91 + .../models/aicusapicoi_x_uam8_n8_dh8l.py | 105 + .../aicusapicoi_x_uam8_n8_dh8l_items_inner.py | 93 + ...y_pxxk6q.py => aicusapicoqew7t5v_ta2ak.py} | 14 +- ...m8c1p3.py => aicusapicor1_kt5_c2m_lnkm.py} | 14 +- ..._k1uex.py => aicusapicose_oar_xmrp_sns.py} | 14 +- .../biz_logic/rest_api/openapi_client/rest.py | 6 +- .../biz_logic/rest_api/test-requirements.txt | 5 - api_test/buildspec-20241012.yaml | 22 +- api_test/sourceGen.sh | 16 + api_test/test_case/test_01_rest_document.py | 4 +- source/infrastructure/lib/api/api-stack.ts | 11 +- 77 files changed, 10320 insertions(+), 1077 deletions(-) rename api_test/{biz_logic => }/Intelli-Agent-RESTful-API-prod-oas30.json (55%) create mode 100644 api_test/README.md delete mode 100644 api_test/biz_logic/README.md delete mode 100644 api_test/biz_logic/openapitools.json delete mode 100644 api_test/biz_logic/response.json create mode 100644 api_test/biz_logic/rest_api/docs/Aicusapico2TwvXbhsTncy.md create mode 100644 api_test/biz_logic/rest_api/docs/Aicusapico2TwvXbhsTncyConfig.md rename 
api_test/biz_logic/rest_api/docs/{IntellapicorVOJKT5wIzUCItemsInner.md => Aicusapico2TwvXbhsTncyItemsInner.md} (57%) create mode 100644 api_test/biz_logic/rest_api/docs/Aicusapico35klzY80ikPh.md create mode 100644 api_test/biz_logic/rest_api/docs/Aicusapico35klzY80ikPhItemsInner.md create mode 100644 api_test/biz_logic/rest_api/docs/Aicusapico35klzY80ikPhItemsInnerQAListInner.md create mode 100644 api_test/biz_logic/rest_api/docs/Aicusapico4rwMspzeBOe5.md create mode 100644 api_test/biz_logic/rest_api/docs/Aicusapico4rwMspzeBOe5InputPayload.md create mode 100644 api_test/biz_logic/rest_api/docs/Aicusapico51RafCAYOxiZ.md create mode 100644 api_test/biz_logic/rest_api/docs/AicusapicoCyd129M65yKV.md create mode 100644 api_test/biz_logic/rest_api/docs/AicusapicoEOcLNul8cwxa.md create mode 100644 api_test/biz_logic/rest_api/docs/AicusapicoQjcoKzzZFI86.md create mode 100644 api_test/biz_logic/rest_api/docs/AicusapicoQjcoKzzZFI86ItemsInner.md rename api_test/biz_logic/rest_api/docs/{IntellapicoXeXaUMjaXtPx.md => AicusapicoUy1YBXiWJ5Aq.md} (51%) create mode 100644 api_test/biz_logic/rest_api/docs/Aicusapicob9jxGQ8zv1AS.md create mode 100644 api_test/biz_logic/rest_api/docs/Aicusapicob9jxGQ8zv1ASItemsInner.md create mode 100644 api_test/biz_logic/rest_api/docs/AicusapicobMN2pLK9AvE8.md create mode 100644 api_test/biz_logic/rest_api/docs/AicusapicobMN2pLK9AvE8Index.md create mode 100644 api_test/biz_logic/rest_api/docs/Aicusapicoh5w3FRwxBjhG.md create mode 100644 api_test/biz_logic/rest_api/docs/AicusapicohQbFv37cvtQS.md create mode 100644 api_test/biz_logic/rest_api/docs/AicusapicohQbFv37cvtQSIndexIds.md create mode 100644 api_test/biz_logic/rest_api/docs/AicusapicoiXUam8N8Dh8l.md create mode 100644 api_test/biz_logic/rest_api/docs/AicusapicoiXUam8N8Dh8lItemsInner.md create mode 100644 api_test/biz_logic/rest_api/docs/Aicusapicoqew7t5vTA2ak.md create mode 100644 api_test/biz_logic/rest_api/docs/Aicusapicor1Kt5C2mLnkm.md rename 
api_test/biz_logic/rest_api/docs/{IntellapicoNK9oLf1K1uex.md => AicusapicoseOArXMRpSNs.md} (54%) delete mode 100644 api_test/biz_logic/rest_api/docs/IntellapicoH4A9yvm8c1p3.md delete mode 100644 api_test/biz_logic/rest_api/docs/IntellapicoNbA0nyPxxk6q.md delete mode 100644 api_test/biz_logic/rest_api/docs/IntellapicorVOJKT5wIzUC.md delete mode 100644 api_test/biz_logic/rest_api/docs/IntellapicorVOJKT5wIzUCConfig.md delete mode 100644 api_test/biz_logic/rest_api/docs/Intellapicormo5LBZXS9Rb.md delete mode 100644 api_test/biz_logic/rest_api/docs/IntellapicowXaFAEWeTgPt.md delete mode 100644 api_test/biz_logic/rest_api/docs/IntellapicowXaFAEWeTgPtItemsInner.md rename api_test/biz_logic/rest_api/openapi_client/models/{intellapicor_vojkt5w_iz_uc.py => aicusapico2_twv_xbhs_tncy.py} (72%) rename api_test/biz_logic/rest_api/openapi_client/models/{intellapicor_vojkt5w_iz_uc_config.py => aicusapico2_twv_xbhs_tncy_config.py} (86%) rename api_test/biz_logic/rest_api/openapi_client/models/{intellapicor_vojkt5w_iz_uc_items_inner.py => aicusapico2_twv_xbhs_tncy_items_inner.py} (90%) rename api_test/biz_logic/rest_api/openapi_client/models/{intellapicow_xa_faewe_tg_pt.py => aicusapico35klz_y80ik_ph.py} (78%) create mode 100644 api_test/biz_logic/rest_api/openapi_client/models/aicusapico35klz_y80ik_ph_items_inner.py create mode 100644 api_test/biz_logic/rest_api/openapi_client/models/aicusapico35klz_y80ik_ph_items_inner_qa_list_inner.py create mode 100644 api_test/biz_logic/rest_api/openapi_client/models/aicusapico4rw_mspze_boe5.py create mode 100644 api_test/biz_logic/rest_api/openapi_client/models/aicusapico4rw_mspze_boe5_input_payload.py rename api_test/biz_logic/rest_api/openapi_client/models/{intellapicormo5_lbzxs9_rb.py => aicusapico51_raf_cay_oxi_z.py} (86%) create mode 100644 api_test/biz_logic/rest_api/openapi_client/models/aicusapico_cyd129_m65y_kv.py create mode 100644 api_test/biz_logic/rest_api/openapi_client/models/aicusapico_eoc_l_nul8cwxa.py create mode 100644 
api_test/biz_logic/rest_api/openapi_client/models/aicusapico_qjco_kzz_zfi86.py rename api_test/biz_logic/rest_api/openapi_client/models/{intellapicow_xa_faewe_tg_pt_items_inner.py => aicusapico_qjco_kzz_zfi86_items_inner.py} (87%) rename api_test/biz_logic/rest_api/openapi_client/models/{intellapico_xe_xa_u_mja_xt_px.py => aicusapico_uy1_ybxi_wj5_aq.py} (87%) create mode 100644 api_test/biz_logic/rest_api/openapi_client/models/aicusapicob9jx_gq8zv1_as.py create mode 100644 api_test/biz_logic/rest_api/openapi_client/models/aicusapicob9jx_gq8zv1_as_items_inner.py create mode 100644 api_test/biz_logic/rest_api/openapi_client/models/aicusapicob_mn2p_lk9_av_e8.py create mode 100644 api_test/biz_logic/rest_api/openapi_client/models/aicusapicob_mn2p_lk9_av_e8_index.py create mode 100644 api_test/biz_logic/rest_api/openapi_client/models/aicusapicoh5w3_f_rwx_bjh_g.py create mode 100644 api_test/biz_logic/rest_api/openapi_client/models/aicusapicoh_qb_fv37cvt_qs.py create mode 100644 api_test/biz_logic/rest_api/openapi_client/models/aicusapicoh_qb_fv37cvt_qs_index_ids.py create mode 100644 api_test/biz_logic/rest_api/openapi_client/models/aicusapicoi_x_uam8_n8_dh8l.py create mode 100644 api_test/biz_logic/rest_api/openapi_client/models/aicusapicoi_x_uam8_n8_dh8l_items_inner.py rename api_test/biz_logic/rest_api/openapi_client/models/{intellapico_nb_a0ny_pxxk6q.py => aicusapicoqew7t5v_ta2ak.py} (86%) rename api_test/biz_logic/rest_api/openapi_client/models/{intellapico_h4_a9yvm8c1p3.py => aicusapicor1_kt5_c2m_lnkm.py} (86%) rename api_test/biz_logic/rest_api/openapi_client/models/{intellapico_nk9o_lf1_k1uex.py => aicusapicose_oar_xmrp_sns.py} (89%) delete mode 100644 api_test/biz_logic/rest_api/test-requirements.txt create mode 100755 api_test/sourceGen.sh diff --git a/api_test/biz_logic/Intelli-Agent-RESTful-API-prod-oas30.json b/api_test/Intelli-Agent-RESTful-API-prod-oas30.json similarity index 55% rename from api_test/biz_logic/Intelli-Agent-RESTful-API-prod-oas30.json 
rename to api_test/Intelli-Agent-RESTful-API-prod-oas30.json index d8cafbc3a..4111eda58 100644 --- a/api_test/biz_logic/Intelli-Agent-RESTful-API-prod-oas30.json +++ b/api_test/Intelli-Agent-RESTful-API-prod-oas30.json @@ -1,12 +1,12 @@ { "openapi" : "3.0.1", "info" : { - "title" : "Intelli-Agent-RESTful-API", - "description" : "Intelli-Agent RESTful API", - "version" : "2024-07-29T06:23:46Z" + "title" : "aics-api", + "description" : "AI-Customer-Service - Core API", + "version" : "2024-10-21T08:32:58Z" }, "servers" : [ { - "url" : "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/{basePath}", + "url" : "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/{basePath}", "variables" : { "basePath" : { "default" : "prod" @@ -14,7 +14,7 @@ } } ], "paths" : { - "/aos" : { + "/chatbot-management/check-default-chatbot" : { "get" : { "responses" : { "400" : { @@ -49,10 +49,52 @@ } }, "security" : [ { - "intelliagentapiconstructApiAuthorizerFB94A0DF" : [ ] + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] } ] }, + "options" : { + "responses" : { + "204" : { + "description" : "204 response", + "headers" : { + "Access-Control-Allow-Origin" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Methods" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Credentials" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Headers" : { + "schema" : { + "type" : "string" + } + } + }, + "content" : { } + } + } + } + }, + "/intention/execution-presigned-url" : { "post" : { + "requestBody" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/aicusapicoCyd129M65yKV" + } + } + }, + "required" : true + }, "responses" : { "400" : { "description" : "400 response", @@ -79,14 +121,14 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Empty" + "$ref" : "#/components/schemas/aicusapicoUy1YBXiWJ5Aq" } } } } }, "security" : [ { - 
"intelliagentapiconstructApiAuthorizerFB94A0DF" : [ ] + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] } ] }, "options" : { @@ -120,40 +162,8 @@ } } }, - "/prompt-management" : { - "options" : { - "responses" : { - "204" : { - "description" : "204 response", - "headers" : { - "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } - }, - "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } - }, - "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } - }, - "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } - } - }, - "content" : { } - } - } - } - }, - "/chat-history/sessions" : { - "get" : { + "/llm" : { + "post" : { "responses" : { "400" : { "description" : "400 response", @@ -187,7 +197,7 @@ } }, "security" : [ { - "intelliagentapiconstructApiAuthorizerFB94A0DF" : [ ] + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] } ] }, "options" : { @@ -221,8 +231,8 @@ } } }, - "/llm" : { - "post" : { + "/prompt-management/models" : { + "get" : { "responses" : { "400" : { "description" : "400 response", @@ -256,7 +266,7 @@ } }, "security" : [ { - "intelliagentapiconstructApiAuthorizerFB94A0DF" : [ ] + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] } ] }, "options" : { @@ -290,8 +300,23 @@ } } }, - "/prompt-management/models" : { + "/intention/executions/{executionId}" : { "get" : { + "parameters" : [ { + "name" : "intentionId", + "in" : "path", + "required" : true, + "schema" : { + "type" : "string" + } + }, { + "name" : "executionId", + "in" : "path", + "required" : true, + "schema" : { + "type" : "string" + } + } ], "responses" : { "400" : { "description" : "400 response", @@ -318,17 +343,25 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Empty" + "$ref" : "#/components/schemas/aicusapico35klzY80ikPh" } } } } }, "security" : [ { - "intelliagentapiconstructApiAuthorizerFB94A0DF" : [ ] + 
"aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] } ] }, "options" : { + "parameters" : [ { + "name" : "executionId", + "in" : "path", + "required" : true, + "schema" : { + "type" : "string" + } + } ], "responses" : { "204" : { "description" : "204 response", @@ -359,7 +392,7 @@ } } }, - "/knowledge-base/executions" : { + "/chatbot-management/chatbots" : { "get" : { "parameters" : [ { "name" : "page_size", @@ -400,14 +433,14 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/intellapicorVOJKT5wIzUC" + "$ref" : "#/components/schemas/aicusapicoiXUam8N8Dh8l" } } } } }, "security" : [ { - "intelliagentapiconstructApiAuthorizerFB94A0DF" : [ ] + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] } ] }, "post" : { @@ -415,54 +448,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/intellapicoNK9oLf1K1uex" - } - } - }, - "required" : true - }, - "responses" : { - "400" : { - "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "500" : { - "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "200" : { - "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - } - }, - "security" : [ { - "intelliagentapiconstructApiAuthorizerFB94A0DF" : [ ] - } ] - }, - "delete" : { - "requestBody" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/intellapicoH4A9yvm8c1p3" + "$ref" : "#/components/schemas/aicusapicobMN2pLK9AvE8" } } }, @@ -494,14 +480,14 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/intellapicoNbA0nyPxxk6q" + "$ref" : "#/components/schemas/aicusapicohQbFv37cvtQS" } } } } }, "security" : [ { - "intelliagentapiconstructApiAuthorizerFB94A0DF" 
: [ ] + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] } ] }, "options" : { @@ -535,7 +521,7 @@ } } }, - "/chatbot-management/chatbots" : { + "/prompt-management/prompts" : { "get" : { "responses" : { "400" : { @@ -570,7 +556,7 @@ } }, "security" : [ { - "intelliagentapiconstructApiAuthorizerFB94A0DF" : [ ] + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] } ] }, "post" : { @@ -607,7 +593,7 @@ } }, "security" : [ { - "intelliagentapiconstructApiAuthorizerFB94A0DF" : [ ] + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] } ] }, "options" : { @@ -641,7 +627,7 @@ } } }, - "/" : { + "/chatbot-management" : { "options" : { "responses" : { "204" : { @@ -673,16 +659,8 @@ } } }, - "/knowledge-base/executions/{executionId}" : { - "get" : { - "parameters" : [ { - "name" : "executionId", - "in" : "path", - "required" : true, - "schema" : { - "type" : "string" - } - } ], + "/chat-history" : { + "post" : { "responses" : { "400" : { "description" : "400 response", @@ -709,25 +687,17 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/intellapicowXaFAEWeTgPt" + "$ref" : "#/components/schemas/Empty" } } } } }, "security" : [ { - "intelliagentapiconstructApiAuthorizerFB94A0DF" : [ ] + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] } ] }, "options" : { - "parameters" : [ { - "name" : "executionId", - "in" : "path", - "required" : true, - "schema" : { - "type" : "string" - } - } ], "responses" : { "204" : { "description" : "204 response", @@ -758,8 +728,8 @@ } } }, - "/prompt-management/prompts" : { - "get" : { + "/chatbot-management/check-chatbot" : { + "post" : { "responses" : { "400" : { "description" : "400 response", @@ -793,46 +763,41 @@ } }, "security" : [ { - "intelliagentapiconstructApiAuthorizerFB94A0DF" : [ ] + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] } ] }, - "post" : { + "options" : { "responses" : { - "400" : { - "description" : "400 response", - "content" : { - 
"application/json" : { + "204" : { + "description" : "204 response", + "headers" : { + "Access-Control-Allow-Origin" : { "schema" : { - "$ref" : "#/components/schemas/Empty" + "type" : "string" } - } - } - }, - "500" : { - "description" : "500 response", - "content" : { - "application/json" : { + }, + "Access-Control-Allow-Methods" : { "schema" : { - "$ref" : "#/components/schemas/Empty" + "type" : "string" } - } - } - }, - "200" : { - "description" : "200 response", - "content" : { - "application/json" : { + }, + "Access-Control-Allow-Credentials" : { "schema" : { - "$ref" : "#/components/schemas/Empty" + "type" : "string" + } + }, + "Access-Control-Allow-Headers" : { + "schema" : { + "type" : "string" } } - } + }, + "content" : { } } - }, - "security" : [ { - "intelliagentapiconstructApiAuthorizerFB94A0DF" : [ ] - } ] - }, + } + } + }, + "/knowledge-base" : { "options" : { "responses" : { "204" : { @@ -864,7 +829,7 @@ } } }, - "/prompt-management/scenes" : { + "/aos" : { "get" : { "responses" : { "400" : { @@ -899,22 +864,59 @@ } }, "security" : [ { - "intelliagentapiconstructApiAuthorizerFB94A0DF" : [ ] + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] } ] }, - "options" : { + "post" : { "responses" : { - "204" : { - "description" : "204 response", - "headers" : { - "Access-Control-Allow-Origin" : { + "400" : { + "description" : "400 response", + "content" : { + "application/json" : { "schema" : { - "type" : "string" + "$ref" : "#/components/schemas/Empty" } - }, - "Access-Control-Allow-Methods" : { + } + } + }, + "500" : { + "description" : "500 response", + "content" : { + "application/json" : { "schema" : { - "type" : "string" + "$ref" : "#/components/schemas/Empty" + } + } + } + }, + "200" : { + "description" : "200 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/Empty" + } + } + } + } + }, + "security" : [ { + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] + } ] + }, + "options" : { 
+ "responses" : { + "204" : { + "description" : "204 response", + "headers" : { + "Access-Control-Allow-Origin" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Methods" : { + "schema" : { + "type" : "string" } }, "Access-Control-Allow-Credentials" : { @@ -933,7 +935,7 @@ } } }, - "/chatbot-management" : { + "/prompt-management" : { "options" : { "responses" : { "204" : { @@ -965,8 +967,8 @@ } } }, - "/extract" : { - "post" : { + "/chat-history/sessions" : { + "get" : { "responses" : { "400" : { "description" : "400 response", @@ -1000,7 +1002,7 @@ } }, "security" : [ { - "intelliagentapiconstructApiAuthorizerFB94A0DF" : [ ] + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] } ] }, "options" : { @@ -1034,8 +1036,53 @@ } } }, - "/chat-history/messages" : { + "/intention" : { + "options" : { + "responses" : { + "204" : { + "description" : "204 response", + "headers" : { + "Access-Control-Allow-Origin" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Methods" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Credentials" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Headers" : { + "schema" : { + "type" : "string" + } + } + }, + "content" : { } + } + } + } + }, + "/intention/executions" : { "get" : { + "parameters" : [ { + "name" : "page_size", + "in" : "query", + "schema" : { + "type" : "string" + } + }, { + "name" : "max_items", + "in" : "query", + "schema" : { + "type" : "string" + } + } ], "responses" : { "400" : { "description" : "400 response", @@ -1062,49 +1109,64 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Empty" + "$ref" : "#/components/schemas/aicusapicob9jxGQ8zv1AS" } } } } }, "security" : [ { - "intelliagentapiconstructApiAuthorizerFB94A0DF" : [ ] + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] } ] }, - "options" : { + "post" : { + "requestBody" : { + "content" : { + "application/json" : { + "schema" 
: { + "$ref" : "#/components/schemas/aicusapicoEOcLNul8cwxa" + } + } + }, + "required" : true + }, "responses" : { - "204" : { - "description" : "204 response", - "headers" : { - "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } - }, - "Access-Control-Allow-Methods" : { + "400" : { + "description" : "400 response", + "content" : { + "application/json" : { "schema" : { - "type" : "string" + "$ref" : "#/components/schemas/Empty" } - }, - "Access-Control-Allow-Credentials" : { + } + } + }, + "500" : { + "description" : "500 response", + "content" : { + "application/json" : { "schema" : { - "type" : "string" + "$ref" : "#/components/schemas/Empty" } - }, - "Access-Control-Allow-Headers" : { + } + } + }, + "200" : { + "description" : "200 response", + "content" : { + "application/json" : { "schema" : { - "type" : "string" + "$ref" : "#/components/schemas/aicusapico4rwMspzeBOe5" } } - }, - "content" : { } + } } - } - } - }, - "/chat-history" : { - "post" : { + }, + "security" : [ { + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] + } ] + }, + "delete" : { "responses" : { "400" : { "description" : "400 response", @@ -1138,7 +1200,7 @@ } }, "security" : [ { - "intelliagentapiconstructApiAuthorizerFB94A0DF" : [ ] + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] } ] }, "options" : { @@ -1172,45 +1234,110 @@ } } }, - "/knowledge-base" : { - "options" : { + "/knowledge-base/executions" : { + "get" : { + "parameters" : [ { + "name" : "page_size", + "in" : "query", + "schema" : { + "type" : "string" + } + }, { + "name" : "max_items", + "in" : "query", + "schema" : { + "type" : "string" + } + } ], "responses" : { - "204" : { - "description" : "204 response", - "headers" : { - "Access-Control-Allow-Origin" : { + "400" : { + "description" : "400 response", + "content" : { + "application/json" : { "schema" : { - "type" : "string" + "$ref" : "#/components/schemas/Empty" } - }, - "Access-Control-Allow-Methods" : { + } + } + }, + "500" : { 
+ "description" : "500 response", + "content" : { + "application/json" : { "schema" : { - "type" : "string" + "$ref" : "#/components/schemas/Empty" } - }, - "Access-Control-Allow-Credentials" : { + } + } + }, + "200" : { + "description" : "200 response", + "content" : { + "application/json" : { "schema" : { - "type" : "string" + "$ref" : "#/components/schemas/aicusapico2TwvXbhsTncy" } - }, - "Access-Control-Allow-Headers" : { + } + } + } + }, + "security" : [ { + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] + } ] + }, + "post" : { + "requestBody" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/aicusapicoseOArXMRpSNs" + } + } + }, + "required" : true + }, + "responses" : { + "400" : { + "description" : "400 response", + "content" : { + "application/json" : { "schema" : { - "type" : "string" + "$ref" : "#/components/schemas/Empty" } } - }, - "content" : { } + } + }, + "500" : { + "description" : "500 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/Empty" + } + } + } + }, + "200" : { + "description" : "200 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/Empty" + } + } + } } - } - } - }, - "/knowledge-base/kb-presigned-url" : { - "post" : { + }, + "security" : [ { + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] + } ] + }, + "delete" : { "requestBody" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/intellapicormo5LBZXS9Rb" + "$ref" : "#/components/schemas/aicusapicor1Kt5C2mLnkm" } } }, @@ -1242,14 +1369,14 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/intellapicoXeXaUMjaXtPx" + "$ref" : "#/components/schemas/aicusapicoqew7t5vTA2ak" } } } } }, "security" : [ { - "intelliagentapiconstructApiAuthorizerFB94A0DF" : [ ] + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] } ] }, "options" : { @@ -1282,46 +1409,869 @@ 
} } } - } - }, - "components" : { - "schemas" : { - "intellapicormo5LBZXS9Rb" : { - "title" : "PostPayload", - "required" : [ "content_type", "file_name" ], + }, + "/" : { + "options" : { + "responses" : { + "204" : { + "description" : "204 response", + "headers" : { + "Access-Control-Allow-Origin" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Methods" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Credentials" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Headers" : { + "schema" : { + "type" : "string" + } + } + }, + "content" : { } + } + } + } + }, + "/knowledge-base/executions/{executionId}" : { + "get" : { + "parameters" : [ { + "name" : "executionId", + "in" : "path", + "required" : true, + "schema" : { + "type" : "string" + } + } ], + "responses" : { + "400" : { + "description" : "400 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/Empty" + } + } + } + }, + "500" : { + "description" : "500 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/Empty" + } + } + } + }, + "200" : { + "description" : "200 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/aicusapicoQjcoKzzZFI86" + } + } + } + } + }, + "security" : [ { + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] + } ] + }, + "options" : { + "parameters" : [ { + "name" : "executionId", + "in" : "path", + "required" : true, + "schema" : { + "type" : "string" + } + } ], + "responses" : { + "204" : { + "description" : "204 response", + "headers" : { + "Access-Control-Allow-Origin" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Methods" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Credentials" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Headers" : { + "schema" : { + "type" : "string" + } + } + }, + 
"content" : { } + } + } + } + }, + "/intention/download-template" : { + "get" : { + "responses" : { + "400" : { + "description" : "400 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/Empty" + } + } + } + }, + "500" : { + "description" : "500 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/Empty" + } + } + } + }, + "200" : { + "description" : "200 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/Empty" + } + } + } + } + }, + "security" : [ { + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] + } ] + }, + "options" : { + "responses" : { + "204" : { + "description" : "204 response", + "headers" : { + "Access-Control-Allow-Origin" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Methods" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Credentials" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Headers" : { + "schema" : { + "type" : "string" + } + } + }, + "content" : { } + } + } + } + }, + "/prompt-management/scenes" : { + "get" : { + "responses" : { + "400" : { + "description" : "400 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/Empty" + } + } + } + }, + "500" : { + "description" : "500 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/Empty" + } + } + } + }, + "200" : { + "description" : "200 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/Empty" + } + } + } + } + }, + "security" : [ { + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] + } ] + }, + "options" : { + "responses" : { + "204" : { + "description" : "204 response", + "headers" : { + "Access-Control-Allow-Origin" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Methods" : { + "schema" : { + 
"type" : "string" + } + }, + "Access-Control-Allow-Credentials" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Headers" : { + "schema" : { + "type" : "string" + } + } + }, + "content" : { } + } + } + } + }, + "/intention/index-used-scan" : { + "post" : { + "responses" : { + "400" : { + "description" : "400 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/Empty" + } + } + } + }, + "500" : { + "description" : "500 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/Empty" + } + } + } + }, + "200" : { + "description" : "200 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/Empty" + } + } + } + } + }, + "security" : [ { + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] + } ] + }, + "options" : { + "responses" : { + "204" : { + "description" : "204 response", + "headers" : { + "Access-Control-Allow-Origin" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Methods" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Credentials" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Headers" : { + "schema" : { + "type" : "string" + } + } + }, + "content" : { } + } + } + } + }, + "/extract" : { + "post" : { + "responses" : { + "400" : { + "description" : "400 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/Empty" + } + } + } + }, + "500" : { + "description" : "500 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/Empty" + } + } + } + }, + "200" : { + "description" : "200 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/Empty" + } + } + } + } + }, + "security" : [ { + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] + } ] + }, + "options" : { + "responses" : { + "204" : { + 
"description" : "204 response", + "headers" : { + "Access-Control-Allow-Origin" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Methods" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Credentials" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Headers" : { + "schema" : { + "type" : "string" + } + } + }, + "content" : { } + } + } + } + }, + "/chat-history/messages" : { + "get" : { + "responses" : { + "400" : { + "description" : "400 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/Empty" + } + } + } + }, + "500" : { + "description" : "500 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/Empty" + } + } + } + }, + "200" : { + "description" : "200 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/Empty" + } + } + } + } + }, + "security" : [ { + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] + } ] + }, + "options" : { + "responses" : { + "204" : { + "description" : "204 response", + "headers" : { + "Access-Control-Allow-Origin" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Methods" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Credentials" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Headers" : { + "schema" : { + "type" : "string" + } + } + }, + "content" : { } + } + } + } + }, + "/chatbot-management/embeddings" : { + "get" : { + "responses" : { + "400" : { + "description" : "400 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/Empty" + } + } + } + }, + "500" : { + "description" : "500 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/Empty" + } + } + } + }, + "200" : { + "description" : "200 response", + "content" : { + "application/json" : { + "schema" : { + 
"$ref" : "#/components/schemas/Empty" + } + } + } + } + }, + "security" : [ { + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] + } ] + }, + "options" : { + "responses" : { + "204" : { + "description" : "204 response", + "headers" : { + "Access-Control-Allow-Origin" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Methods" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Credentials" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Headers" : { + "schema" : { + "type" : "string" + } + } + }, + "content" : { } + } + } + } + }, + "/knowledge-base/kb-presigned-url" : { + "post" : { + "requestBody" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/aicusapico51RafCAYOxiZ" + } + } + }, + "required" : true + }, + "responses" : { + "400" : { + "description" : "400 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/Empty" + } + } + } + }, + "500" : { + "description" : "500 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/Empty" + } + } + } + }, + "200" : { + "description" : "200 response", + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/aicusapicoh5w3FRwxBjhG" + } + } + } + } + }, + "security" : [ { + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] + } ] + }, + "options" : { + "responses" : { + "204" : { + "description" : "204 response", + "headers" : { + "Access-Control-Allow-Origin" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Methods" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Credentials" : { + "schema" : { + "type" : "string" + } + }, + "Access-Control-Allow-Headers" : { + "schema" : { + "type" : "string" + } + } + }, + "content" : { } + } + } + } + } + }, + "components" : { + "schemas" : { + "aicusapico2TwvXbhsTncy" : { + "title" : 
"ResponsePayload", + "type" : "object", + "properties" : { + "Config" : { + "type" : "object", + "properties" : { + "PageSize" : { + "type" : "integer" + }, + "MaxItems" : { + "type" : "integer" + } + } + }, + "Items" : { + "type" : "array", + "items" : { + "required" : [ "chatbotId", "createTime", "embeddingModelType", "executionId", "executionStatus", "groupName", "indexId", "indexType", "offline", "operationType", "qaEnhance", "s3Bucket", "s3Prefix", "sfnExecutionId", "uiStatus" ], + "type" : "object", + "properties" : { + "executionStatus" : { + "type" : "string" + }, + "s3Prefix" : { + "type" : "string" + }, + "uiStatus" : { + "type" : "string" + }, + "s3Bucket" : { + "type" : "string" + }, + "qaEnhance" : { + "type" : "string" + }, + "sfnExecutionId" : { + "type" : "string" + }, + "embeddingModelType" : { + "type" : "string" + }, + "offline" : { + "type" : "string" + }, + "executionId" : { + "type" : "string" + }, + "groupName" : { + "type" : "string" + }, + "chatbotId" : { + "type" : "string" + }, + "indexType" : { + "type" : "string" + }, + "createTime" : { + "type" : "string" + }, + "indexId" : { + "type" : "string" + }, + "operationType" : { + "type" : "string" + } + } + } + }, + "Count" : { + "type" : "integer" + } + } + }, + "aicusapicoseOArXMRpSNs" : { + "title" : "PostPayload", + "required" : [ "chatbotId", "indexType", "offline", "operationType", "qaEnhance", "s3Bucket", "s3Prefix" ], + "type" : "object", + "properties" : { + "offline" : { + "type" : "string" + }, + "chatbotId" : { + "type" : "string" + }, + "indexType" : { + "type" : "string" + }, + "operationType" : { + "type" : "string" + }, + "s3Prefix" : { + "type" : "string" + }, + "s3Bucket" : { + "type" : "string" + }, + "qaEnhance" : { + "type" : "string" + } + } + }, + "aicusapicobMN2pLK9AvE8" : { + "title" : "PostPayload", + "required" : [ "chatbotId", "index", "modelId", "modelName" ], "type" : "object", "properties" : { - "content_type" : { + "modelName" : { "type" : "string" }, - 
"file_name" : { + "chatbotId" : { "type" : "string" + }, + "modelId" : { + "type" : "string" + }, + "index" : { + "required" : [ "intention", "qd", "qq" ], + "type" : "object", + "properties" : { + "qq" : { + "type" : "string" + }, + "qd" : { + "type" : "string" + }, + "intention" : { + "type" : "string" + } + } } } }, - "Empty" : { - "title" : "Empty Schema", - "type" : "object" + "aicusapicoEOcLNul8cwxa" : { + "title" : "PostPayload", + "required" : [ "chatbotId", "index", "model", "s3Bucket", "s3Prefix" ], + "type" : "object", + "properties" : { + "chatbotId" : { + "type" : "string" + }, + "index" : { + "type" : "string" + }, + "model" : { + "type" : "string" + }, + "s3Prefix" : { + "type" : "string" + }, + "s3Bucket" : { + "type" : "string" + } + } + }, + "aicusapicoUy1YBXiWJ5Aq" : { + "title" : "ResponsePayload", + "type" : "object", + "properties" : { + "data" : { + "type" : "string" + }, + "message" : { + "type" : "string" + }, + "s3Prefix" : { + "type" : "string" + }, + "s3Bucket" : { + "type" : "string" + } + } }, - "intellapicoNbA0nyPxxk6q" : { + "aicusapicoh5w3FRwxBjhG" : { "title" : "ResponsePayload", "type" : "object", "properties" : { "data" : { + "type" : "string" + }, + "message" : { + "type" : "string" + }, + "s3Prefix" : { + "type" : "string" + }, + "s3Bucket" : { + "type" : "string" + } + } + }, + "aicusapicor1Kt5C2mLnkm" : { + "title" : "PostPayload", + "required" : [ "executionId" ], + "type" : "object", + "properties" : { + "executionId" : { "type" : "array", "items" : { "type" : "string" } + } + } + }, + "aicusapico4rwMspzeBOe5" : { + "title" : "ResponsePayload", + "type" : "object", + "properties" : { + "result" : { + "type" : "string" }, - "message" : { + "execution_id" : { "type" : "string" + }, + "input_payload" : { + "type" : "object", + "properties" : { + "chatbotId" : { + "type" : "string" + }, + "groupName" : { + "type" : "string" + }, + "tableItemId" : { + "type" : "string" + }, + "fieldName" : { + "type" : "string" + }, + "index" : 
{ + "type" : "string" + }, + "model" : { + "type" : "string" + } + } } } }, - "intellapicorVOJKT5wIzUC" : { + "aicusapicoiXUam8N8Dh8l" : { "title" : "ResponsePayload", "type" : "object", "properties" : { + "chatbot_ids" : { + "type" : "array", + "items" : { + "type" : "string" + } + }, "Config" : { "type" : "object", "properties" : { @@ -1336,52 +2286,78 @@ "Items" : { "type" : "array", "items" : { - "required" : [ "chatbotId", "createTime", "embeddingModelType", "executionId", "executionStatus", "groupName", "indexId", "indexType", "offline", "operationType", "qaEnhance", "s3Bucket", "s3Prefix", "sfnExecutionId", "uiStatus" ], + "required" : [ "ChatbotId", "LastModifiedTime", "ModelId", "ModelName" ], "type" : "object", "properties" : { - "executionStatus" : { - "type" : "string" - }, - "s3Prefix" : { + "ChatbotId" : { "type" : "string" }, - "uiStatus" : { + "ModelName" : { "type" : "string" }, - "s3Bucket" : { + "LastModifiedTime" : { "type" : "string" }, - "qaEnhance" : { + "ModelId" : { "type" : "string" - }, - "sfnExecutionId" : { + } + } + } + }, + "Count" : { + "type" : "integer" + } + } + }, + "aicusapicob9jxGQ8zv1AS" : { + "title" : "ResponsePayload", + "type" : "object", + "properties" : { + "Config" : { + "type" : "object", + "properties" : { + "PageSize" : { + "type" : "integer" + }, + "MaxItems" : { + "type" : "integer" + } + } + }, + "Items" : { + "type" : "array", + "items" : { + "required" : [ "chatbotId", "createBy", "createTime", "details", "executionId", "executionStatus", "fileName", "index", "model", "tag" ], + "type" : "object", + "properties" : { + "executionId" : { "type" : "string" }, - "embeddingModelType" : { + "fileName" : { "type" : "string" }, - "offline" : { + "createBy" : { "type" : "string" }, - "executionId" : { + "chatbotId" : { "type" : "string" }, - "groupName" : { + "createTime" : { "type" : "string" }, - "chatbotId" : { + "executionStatus" : { "type" : "string" }, - "indexType" : { + "index" : { "type" : "string" }, - 
"createTime" : { + "model" : { "type" : "string" }, - "indexId" : { + "details" : { "type" : "string" }, - "operationType" : { + "tag" : { "type" : "string" } } @@ -1392,7 +2368,53 @@ } } }, - "intellapicowXaFAEWeTgPt" : { + "Empty" : { + "title" : "Empty Schema", + "type" : "object" + }, + "aicusapicoCyd129M65yKV" : { + "title" : "PostPayload", + "required" : [ "content_type", "file_name" ], + "type" : "object", + "properties" : { + "content_type" : { + "type" : "string" + }, + "file_name" : { + "type" : "string" + } + } + }, + "aicusapicohQbFv37cvtQS" : { + "title" : "ResponsePayload", + "type" : "object", + "properties" : { + "chatbotId" : { + "type" : "string" + }, + "groupName" : { + "type" : "string" + }, + "indexIds" : { + "type" : "object", + "properties" : { + "qq" : { + "type" : "string" + }, + "qd" : { + "type" : "string" + }, + "intention" : { + "type" : "string" + } + } + }, + "Message" : { + "type" : "string" + } + } + }, + "aicusapicoQjcoKzzZFI86" : { "title" : "ResponsePayload", "type" : "object", "properties" : { @@ -1428,68 +2450,84 @@ } } }, - "intellapicoNK9oLf1K1uex" : { - "title" : "PostPayload", - "required" : [ "chatbotId", "indexType", "offline", "operationType", "qaEnhance", "s3Bucket", "s3Prefix" ], + "aicusapicoqew7t5vTA2ak" : { + "title" : "ResponsePayload", "type" : "object", "properties" : { - "offline" : { - "type" : "string" - }, - "chatbotId" : { - "type" : "string" - }, - "indexType" : { - "type" : "string" - }, - "operationType" : { - "type" : "string" - }, - "s3Prefix" : { - "type" : "string" - }, - "s3Bucket" : { - "type" : "string" + "data" : { + "type" : "array", + "items" : { + "type" : "string" + } }, - "qaEnhance" : { + "message" : { "type" : "string" } } }, - "intellapicoXeXaUMjaXtPx" : { + "aicusapico35klzY80ikPh" : { "title" : "ResponsePayload", "type" : "object", "properties" : { - "data" : { - "type" : "string" - }, - "message" : { - "type" : "string" - }, - "s3Prefix" : { - "type" : "string" + "Items" : { + "type" : 
"array", + "items" : { + "required" : [ "createTime", "s3Path", "s3Prefix", "status" ], + "type" : "object", + "properties" : { + "s3Path" : { + "type" : "string" + }, + "createTime" : { + "type" : "string" + }, + "s3Prefix" : { + "type" : "string" + }, + "QAList" : { + "type" : "array", + "items" : { + "type" : "object", + "properties" : { + "question" : { + "type" : "string" + }, + "kwargs" : { + "type" : "string" + }, + "intention" : { + "type" : "string" + } + } + } + }, + "status" : { + "type" : "string" + } + } + } }, - "s3Bucket" : { - "type" : "string" + "Count" : { + "type" : "integer" } } }, - "intellapicoH4A9yvm8c1p3" : { + "aicusapico51RafCAYOxiZ" : { "title" : "PostPayload", - "required" : [ "executionId" ], + "required" : [ "content_type", "file_name" ], "type" : "object", "properties" : { - "executionId" : { - "type" : "array", - "items" : { - "type" : "string" - } + "content_type" : { + "type" : "string" + }, + "file_name" : { + "type" : "string" } } } }, "securitySchemes" : { - "intelliagentapiconstructApiAuthorizerFB94A0DF" : { + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : { "type" : "apiKey", "name" : "Authorization", "in" : "header", diff --git a/api_test/README.md b/api_test/README.md new file mode 100644 index 000000000..9e4da7d35 --- /dev/null +++ b/api_test/README.md @@ -0,0 +1,63 @@ +# Generating a Client with OpenAPI Generator + +### Installing OpenAPI Generator + +If you haven't installed OpenAPI Generator yet, you can install it using npm (Node.js required) or Homebrew (on macOS): + +To install with npm: +```shell +sudo npm install @openapitools/openapi-generator-cli -g +``` + +To install with Homebrew: +```shell +brew install openapi-generator +``` + +### Generating the Client +Use the following command to generate the client code. 
Assuming your OpenAPI specification file is named openapi.json and you want to generate a Python client: + +```shell +./sourceGen.sh +``` +This script invokes openapi-generator-cli, where: + +* -i openapi.json specifies the input file. +* -g python specifies the client language to generate, in this case, Python. +* -o ./generated-client specifies the output directory. + + +### Viewing the Generated Client +Once the generation is complete, you can find the generated client code in the specified output directory (e.g., ./generated-client). + +### Updating the code +Use the generated code to write test cases, referring to the Markdown documents in the biz_logic/rest_api/docs directory. + +### Generating API access token +Since the API for testing requires a token, the command below can be used to obtain token-related information. + +```shell +aws cognito-idp initiate-auth \ + --region $REGION \ + --auth-flow USER_PASSWORD_AUTH \ + --client-id $OIDC_CLIENT_ID \ + --auth-parameters USERNAME=$SUB_EMAIL,PASSWORD=$ADMIN_PASSWORD +``` +In this command: + +* REGION, SUB_EMAIL, and ADMIN_PASSWORD are variables defined in buildspec.yaml +* OIDC_CLIENT_ID is an output variable generated by the deployment of the Stack, which can be obtained from the CloudFormation console. + +The IdToken returned by the above command is the access token required to call the API. + +### Updating env +Update the env file with the token information obtained above; for example, run: + + +```shell +aws cognito-idp initiate-auth \ + --region us-east-1 \ + --auth-flow USER_PASSWORD_AUTH \ + --client-id 1t40t7rhb7f2mef8fg356mtj6t \ + --auth-parameters USERNAME=cuihubin@amazon.com,PASSWORD=TEST123! 
+``` \ No newline at end of file diff --git a/api_test/biz_logic/README.md b/api_test/biz_logic/README.md deleted file mode 100644 index 403d56465..000000000 --- a/api_test/biz_logic/README.md +++ /dev/null @@ -1,32 +0,0 @@ -# Generating a Client with OpenAPI Generator - -### Installing OpenAPI Generator - -If you haven't installed OpenAPI Generator yet, you can install it using npm (Node.js required) or Homebrew (on macOS): - -To install with npm: -```shell -sudo npm install @openapitools/openapi-generator-cli -g -``` - -To install with Homebrew: -```shell -brew install openapi-generator -``` - -### Generating the Client -Use the following command to generate the client code. Assuming your OpenAPI specification file is named openapi.json and you want to generate a Python client: - -```shell -openapi-generator-cli generate -i llmApi-prod-oas30.json -g python -o ./generated-client -``` -In this command: - -* -i openapi.json specifies the input file. -* -g python specifies the client language to generate, in this case, Python. -* -o ./generated-client specifies the output directory. - -其他常见语言的选项包括: - -### Viewing the Generated Client -Once the generation is complete, you can find the generated client code in the specified output directory (e.g., ./generated-client). 
\ No newline at end of file diff --git a/api_test/biz_logic/openapitools.json b/api_test/biz_logic/openapitools.json deleted file mode 100644 index 5c50d6a26..000000000 --- a/api_test/biz_logic/openapitools.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "$schema": "./node_modules/@openapitools/openapi-generator-cli/config.schema.json", - "spaces": 2, - "generator-cli": { - "version": "7.6.0" - } -} diff --git a/api_test/biz_logic/response.json b/api_test/biz_logic/response.json deleted file mode 100644 index 0e5a1a925..000000000 --- a/api_test/biz_logic/response.json +++ /dev/null @@ -1 +0,0 @@ -{"errorMessage": "can only concatenate str (not \"dict\") to str", "errorType": "TypeError", "requestId": "9e22eae2-13db-4fea-9f65-46b4e05e311a", "stackTrace": [" File \"/var/task/lambda_function.py\", line 82, in lambda_handler\n __gen_uncompleted_report(event)\n", " File \"/var/task/lambda_function.py\", line 67, in __gen_uncompleted_report\n message+=event['detail']\n"]} \ No newline at end of file diff --git a/api_test/biz_logic/rest_api/docs/Aicusapico2TwvXbhsTncy.md b/api_test/biz_logic/rest_api/docs/Aicusapico2TwvXbhsTncy.md new file mode 100644 index 000000000..ca53bff91 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/Aicusapico2TwvXbhsTncy.md @@ -0,0 +1,31 @@ +# Aicusapico2TwvXbhsTncy + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**Aicusapico2TwvXbhsTncyConfig**](Aicusapico2TwvXbhsTncyConfig.md) | | [optional] +**items** | [**List[Aicusapico2TwvXbhsTncyItemsInner]**](Aicusapico2TwvXbhsTncyItemsInner.md) | | [optional] +**count** | **int** | | [optional] + +## Example + +```python +from openapi_client.models.aicusapico2_twv_xbhs_tncy import Aicusapico2TwvXbhsTncy + +# TODO update the JSON string below +json = "{}" +# create an instance of Aicusapico2TwvXbhsTncy from a JSON string +aicusapico2_twv_xbhs_tncy_instance = Aicusapico2TwvXbhsTncy.from_json(json) +# print the JSON string 
representation of the object +print(Aicusapico2TwvXbhsTncy.to_json()) + +# convert the object into a dict +aicusapico2_twv_xbhs_tncy_dict = aicusapico2_twv_xbhs_tncy_instance.to_dict() +# create an instance of Aicusapico2TwvXbhsTncy from a dict +aicusapico2_twv_xbhs_tncy_from_dict = Aicusapico2TwvXbhsTncy.from_dict(aicusapico2_twv_xbhs_tncy_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/Aicusapico2TwvXbhsTncyConfig.md b/api_test/biz_logic/rest_api/docs/Aicusapico2TwvXbhsTncyConfig.md new file mode 100644 index 000000000..ca597a5cf --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/Aicusapico2TwvXbhsTncyConfig.md @@ -0,0 +1,30 @@ +# Aicusapico2TwvXbhsTncyConfig + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**page_size** | **int** | | [optional] +**max_items** | **int** | | [optional] + +## Example + +```python +from openapi_client.models.aicusapico2_twv_xbhs_tncy_config import Aicusapico2TwvXbhsTncyConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of Aicusapico2TwvXbhsTncyConfig from a JSON string +aicusapico2_twv_xbhs_tncy_config_instance = Aicusapico2TwvXbhsTncyConfig.from_json(json) +# print the JSON string representation of the object +print(Aicusapico2TwvXbhsTncyConfig.to_json()) + +# convert the object into a dict +aicusapico2_twv_xbhs_tncy_config_dict = aicusapico2_twv_xbhs_tncy_config_instance.to_dict() +# create an instance of Aicusapico2TwvXbhsTncyConfig from a dict +aicusapico2_twv_xbhs_tncy_config_from_dict = Aicusapico2TwvXbhsTncyConfig.from_dict(aicusapico2_twv_xbhs_tncy_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git 
a/api_test/biz_logic/rest_api/docs/IntellapicorVOJKT5wIzUCItemsInner.md b/api_test/biz_logic/rest_api/docs/Aicusapico2TwvXbhsTncyItemsInner.md similarity index 57% rename from api_test/biz_logic/rest_api/docs/IntellapicorVOJKT5wIzUCItemsInner.md rename to api_test/biz_logic/rest_api/docs/Aicusapico2TwvXbhsTncyItemsInner.md index beb0a983d..59c11f893 100644 --- a/api_test/biz_logic/rest_api/docs/IntellapicorVOJKT5wIzUCItemsInner.md +++ b/api_test/biz_logic/rest_api/docs/Aicusapico2TwvXbhsTncyItemsInner.md @@ -1,4 +1,4 @@ -# IntellapicorVOJKT5wIzUCItemsInner +# Aicusapico2TwvXbhsTncyItemsInner ## Properties @@ -24,19 +24,19 @@ Name | Type | Description | Notes ## Example ```python -from openapi_client.models.intellapicor_vojkt5w_iz_uc_items_inner import IntellapicorVOJKT5wIzUCItemsInner +from openapi_client.models.aicusapico2_twv_xbhs_tncy_items_inner import Aicusapico2TwvXbhsTncyItemsInner # TODO update the JSON string below json = "{}" -# create an instance of IntellapicorVOJKT5wIzUCItemsInner from a JSON string -intellapicor_vojkt5w_iz_uc_items_inner_instance = IntellapicorVOJKT5wIzUCItemsInner.from_json(json) +# create an instance of Aicusapico2TwvXbhsTncyItemsInner from a JSON string +aicusapico2_twv_xbhs_tncy_items_inner_instance = Aicusapico2TwvXbhsTncyItemsInner.from_json(json) # print the JSON string representation of the object -print(IntellapicorVOJKT5wIzUCItemsInner.to_json()) +print(Aicusapico2TwvXbhsTncyItemsInner.to_json()) # convert the object into a dict -intellapicor_vojkt5w_iz_uc_items_inner_dict = intellapicor_vojkt5w_iz_uc_items_inner_instance.to_dict() -# create an instance of IntellapicorVOJKT5wIzUCItemsInner from a dict -intellapicor_vojkt5w_iz_uc_items_inner_from_dict = IntellapicorVOJKT5wIzUCItemsInner.from_dict(intellapicor_vojkt5w_iz_uc_items_inner_dict) +aicusapico2_twv_xbhs_tncy_items_inner_dict = aicusapico2_twv_xbhs_tncy_items_inner_instance.to_dict() +# create an instance of Aicusapico2TwvXbhsTncyItemsInner from a dict 
+aicusapico2_twv_xbhs_tncy_items_inner_from_dict = Aicusapico2TwvXbhsTncyItemsInner.from_dict(aicusapico2_twv_xbhs_tncy_items_inner_dict) ``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/api_test/biz_logic/rest_api/docs/Aicusapico35klzY80ikPh.md b/api_test/biz_logic/rest_api/docs/Aicusapico35klzY80ikPh.md new file mode 100644 index 000000000..f58cdc133 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/Aicusapico35klzY80ikPh.md @@ -0,0 +1,30 @@ +# Aicusapico35klzY80ikPh + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**items** | [**List[Aicusapico35klzY80ikPhItemsInner]**](Aicusapico35klzY80ikPhItemsInner.md) | | [optional] +**count** | **int** | | [optional] + +## Example + +```python +from openapi_client.models.aicusapico35klz_y80ik_ph import Aicusapico35klzY80ikPh + +# TODO update the JSON string below +json = "{}" +# create an instance of Aicusapico35klzY80ikPh from a JSON string +aicusapico35klz_y80ik_ph_instance = Aicusapico35klzY80ikPh.from_json(json) +# print the JSON string representation of the object +print(Aicusapico35klzY80ikPh.to_json()) + +# convert the object into a dict +aicusapico35klz_y80ik_ph_dict = aicusapico35klz_y80ik_ph_instance.to_dict() +# create an instance of Aicusapico35klzY80ikPh from a dict +aicusapico35klz_y80ik_ph_from_dict = Aicusapico35klzY80ikPh.from_dict(aicusapico35klz_y80ik_ph_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/Aicusapico35klzY80ikPhItemsInner.md b/api_test/biz_logic/rest_api/docs/Aicusapico35klzY80ikPhItemsInner.md new file mode 100644 index 000000000..d793aed1f --- /dev/null +++ 
b/api_test/biz_logic/rest_api/docs/Aicusapico35klzY80ikPhItemsInner.md @@ -0,0 +1,33 @@ +# Aicusapico35klzY80ikPhItemsInner + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**s3_path** | **str** | | +**create_time** | **str** | | +**s3_prefix** | **str** | | +**qa_list** | [**List[Aicusapico35klzY80ikPhItemsInnerQAListInner]**](Aicusapico35klzY80ikPhItemsInnerQAListInner.md) | | [optional] +**status** | **str** | | + +## Example + +```python +from openapi_client.models.aicusapico35klz_y80ik_ph_items_inner import Aicusapico35klzY80ikPhItemsInner + +# TODO update the JSON string below +json = "{}" +# create an instance of Aicusapico35klzY80ikPhItemsInner from a JSON string +aicusapico35klz_y80ik_ph_items_inner_instance = Aicusapico35klzY80ikPhItemsInner.from_json(json) +# print the JSON string representation of the object +print(Aicusapico35klzY80ikPhItemsInner.to_json()) + +# convert the object into a dict +aicusapico35klz_y80ik_ph_items_inner_dict = aicusapico35klz_y80ik_ph_items_inner_instance.to_dict() +# create an instance of Aicusapico35klzY80ikPhItemsInner from a dict +aicusapico35klz_y80ik_ph_items_inner_from_dict = Aicusapico35klzY80ikPhItemsInner.from_dict(aicusapico35klz_y80ik_ph_items_inner_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/Aicusapico35klzY80ikPhItemsInnerQAListInner.md b/api_test/biz_logic/rest_api/docs/Aicusapico35klzY80ikPhItemsInnerQAListInner.md new file mode 100644 index 000000000..c5e6c10e9 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/Aicusapico35klzY80ikPhItemsInnerQAListInner.md @@ -0,0 +1,31 @@ +# Aicusapico35klzY80ikPhItemsInnerQAListInner + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**question** | **str** | 
| [optional] +**kwargs** | **str** | | [optional] +**intention** | **str** | | [optional] + +## Example + +```python +from openapi_client.models.aicusapico35klz_y80ik_ph_items_inner_qa_list_inner import Aicusapico35klzY80ikPhItemsInnerQAListInner + +# TODO update the JSON string below +json = "{}" +# create an instance of Aicusapico35klzY80ikPhItemsInnerQAListInner from a JSON string +aicusapico35klz_y80ik_ph_items_inner_qa_list_inner_instance = Aicusapico35klzY80ikPhItemsInnerQAListInner.from_json(json) +# print the JSON string representation of the object +print(Aicusapico35klzY80ikPhItemsInnerQAListInner.to_json()) + +# convert the object into a dict +aicusapico35klz_y80ik_ph_items_inner_qa_list_inner_dict = aicusapico35klz_y80ik_ph_items_inner_qa_list_inner_instance.to_dict() +# create an instance of Aicusapico35klzY80ikPhItemsInnerQAListInner from a dict +aicusapico35klz_y80ik_ph_items_inner_qa_list_inner_from_dict = Aicusapico35klzY80ikPhItemsInnerQAListInner.from_dict(aicusapico35klz_y80ik_ph_items_inner_qa_list_inner_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/Aicusapico4rwMspzeBOe5.md b/api_test/biz_logic/rest_api/docs/Aicusapico4rwMspzeBOe5.md new file mode 100644 index 000000000..7c0df331d --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/Aicusapico4rwMspzeBOe5.md @@ -0,0 +1,31 @@ +# Aicusapico4rwMspzeBOe5 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**result** | **str** | | [optional] +**execution_id** | **str** | | [optional] +**input_payload** | [**Aicusapico4rwMspzeBOe5InputPayload**](Aicusapico4rwMspzeBOe5InputPayload.md) | | [optional] + +## Example + +```python +from openapi_client.models.aicusapico4rw_mspze_boe5 import Aicusapico4rwMspzeBOe5 + +# TODO update the JSON string below +json = "{}" 
+# create an instance of Aicusapico4rwMspzeBOe5 from a JSON string +aicusapico4rw_mspze_boe5_instance = Aicusapico4rwMspzeBOe5.from_json(json) +# print the JSON string representation of the object +print(Aicusapico4rwMspzeBOe5.to_json()) + +# convert the object into a dict +aicusapico4rw_mspze_boe5_dict = aicusapico4rw_mspze_boe5_instance.to_dict() +# create an instance of Aicusapico4rwMspzeBOe5 from a dict +aicusapico4rw_mspze_boe5_from_dict = Aicusapico4rwMspzeBOe5.from_dict(aicusapico4rw_mspze_boe5_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/Aicusapico4rwMspzeBOe5InputPayload.md b/api_test/biz_logic/rest_api/docs/Aicusapico4rwMspzeBOe5InputPayload.md new file mode 100644 index 000000000..63f10d1f5 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/Aicusapico4rwMspzeBOe5InputPayload.md @@ -0,0 +1,34 @@ +# Aicusapico4rwMspzeBOe5InputPayload + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**chatbot_id** | **str** | | [optional] +**group_name** | **str** | | [optional] +**table_item_id** | **str** | | [optional] +**field_name** | **str** | | [optional] +**index** | **str** | | [optional] +**model** | **str** | | [optional] + +## Example + +```python +from openapi_client.models.aicusapico4rw_mspze_boe5_input_payload import Aicusapico4rwMspzeBOe5InputPayload + +# TODO update the JSON string below +json = "{}" +# create an instance of Aicusapico4rwMspzeBOe5InputPayload from a JSON string +aicusapico4rw_mspze_boe5_input_payload_instance = Aicusapico4rwMspzeBOe5InputPayload.from_json(json) +# print the JSON string representation of the object +print(Aicusapico4rwMspzeBOe5InputPayload.to_json()) + +# convert the object into a dict +aicusapico4rw_mspze_boe5_input_payload_dict = 
aicusapico4rw_mspze_boe5_input_payload_instance.to_dict() +# create an instance of Aicusapico4rwMspzeBOe5InputPayload from a dict +aicusapico4rw_mspze_boe5_input_payload_from_dict = Aicusapico4rwMspzeBOe5InputPayload.from_dict(aicusapico4rw_mspze_boe5_input_payload_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/Aicusapico51RafCAYOxiZ.md b/api_test/biz_logic/rest_api/docs/Aicusapico51RafCAYOxiZ.md new file mode 100644 index 000000000..2680a966e --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/Aicusapico51RafCAYOxiZ.md @@ -0,0 +1,30 @@ +# Aicusapico51RafCAYOxiZ + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**content_type** | **str** | | +**file_name** | **str** | | + +## Example + +```python +from openapi_client.models.aicusapico51_raf_cay_oxi_z import Aicusapico51RafCAYOxiZ + +# TODO update the JSON string below +json = "{}" +# create an instance of Aicusapico51RafCAYOxiZ from a JSON string +aicusapico51_raf_cay_oxi_z_instance = Aicusapico51RafCAYOxiZ.from_json(json) +# print the JSON string representation of the object +print(Aicusapico51RafCAYOxiZ.to_json()) + +# convert the object into a dict +aicusapico51_raf_cay_oxi_z_dict = aicusapico51_raf_cay_oxi_z_instance.to_dict() +# create an instance of Aicusapico51RafCAYOxiZ from a dict +aicusapico51_raf_cay_oxi_z_from_dict = Aicusapico51RafCAYOxiZ.from_dict(aicusapico51_raf_cay_oxi_z_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoCyd129M65yKV.md b/api_test/biz_logic/rest_api/docs/AicusapicoCyd129M65yKV.md new file mode 100644 index 000000000..0801851ff --- /dev/null +++ 
b/api_test/biz_logic/rest_api/docs/AicusapicoCyd129M65yKV.md @@ -0,0 +1,30 @@ +# AicusapicoCyd129M65yKV + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**content_type** | **str** | | +**file_name** | **str** | | + +## Example + +```python +from openapi_client.models.aicusapico_cyd129_m65y_kv import AicusapicoCyd129M65yKV + +# TODO update the JSON string below +json = "{}" +# create an instance of AicusapicoCyd129M65yKV from a JSON string +aicusapico_cyd129_m65y_kv_instance = AicusapicoCyd129M65yKV.from_json(json) +# print the JSON string representation of the object +print(AicusapicoCyd129M65yKV.to_json()) + +# convert the object into a dict +aicusapico_cyd129_m65y_kv_dict = aicusapico_cyd129_m65y_kv_instance.to_dict() +# create an instance of AicusapicoCyd129M65yKV from a dict +aicusapico_cyd129_m65y_kv_from_dict = AicusapicoCyd129M65yKV.from_dict(aicusapico_cyd129_m65y_kv_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoEOcLNul8cwxa.md b/api_test/biz_logic/rest_api/docs/AicusapicoEOcLNul8cwxa.md new file mode 100644 index 000000000..c3c8b5f18 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/AicusapicoEOcLNul8cwxa.md @@ -0,0 +1,33 @@ +# AicusapicoEOcLNul8cwxa + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**chatbot_id** | **str** | | +**index** | **str** | | +**model** | **str** | | +**s3_prefix** | **str** | | +**s3_bucket** | **str** | | + +## Example + +```python +from openapi_client.models.aicusapico_eoc_l_nul8cwxa import AicusapicoEOcLNul8cwxa + +# TODO update the JSON string below +json = "{}" +# create an instance of AicusapicoEOcLNul8cwxa from a JSON string +aicusapico_eoc_l_nul8cwxa_instance = 
AicusapicoEOcLNul8cwxa.from_json(json) +# print the JSON string representation of the object +print(AicusapicoEOcLNul8cwxa.to_json()) + +# convert the object into a dict +aicusapico_eoc_l_nul8cwxa_dict = aicusapico_eoc_l_nul8cwxa_instance.to_dict() +# create an instance of AicusapicoEOcLNul8cwxa from a dict +aicusapico_eoc_l_nul8cwxa_from_dict = AicusapicoEOcLNul8cwxa.from_dict(aicusapico_eoc_l_nul8cwxa_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoQjcoKzzZFI86.md b/api_test/biz_logic/rest_api/docs/AicusapicoQjcoKzzZFI86.md new file mode 100644 index 000000000..dad248e0b --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/AicusapicoQjcoKzzZFI86.md @@ -0,0 +1,30 @@ +# AicusapicoQjcoKzzZFI86 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**items** | [**List[AicusapicoQjcoKzzZFI86ItemsInner]**](AicusapicoQjcoKzzZFI86ItemsInner.md) | | [optional] +**count** | **int** | | [optional] + +## Example + +```python +from openapi_client.models.aicusapico_qjco_kzz_zfi86 import AicusapicoQjcoKzzZFI86 + +# TODO update the JSON string below +json = "{}" +# create an instance of AicusapicoQjcoKzzZFI86 from a JSON string +aicusapico_qjco_kzz_zfi86_instance = AicusapicoQjcoKzzZFI86.from_json(json) +# print the JSON string representation of the object +print(AicusapicoQjcoKzzZFI86.to_json()) + +# convert the object into a dict +aicusapico_qjco_kzz_zfi86_dict = aicusapico_qjco_kzz_zfi86_instance.to_dict() +# create an instance of AicusapicoQjcoKzzZFI86 from a dict +aicusapico_qjco_kzz_zfi86_from_dict = AicusapicoQjcoKzzZFI86.from_dict(aicusapico_qjco_kzz_zfi86_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to 
README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoQjcoKzzZFI86ItemsInner.md b/api_test/biz_logic/rest_api/docs/AicusapicoQjcoKzzZFI86ItemsInner.md new file mode 100644 index 000000000..4d0914e60 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/AicusapicoQjcoKzzZFI86ItemsInner.md @@ -0,0 +1,34 @@ +# AicusapicoQjcoKzzZFI86ItemsInner + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**execution_id** | **str** | | +**s3_path** | **str** | | +**create_time** | **str** | | +**s3_prefix** | **str** | | +**s3_bucket** | **str** | | +**status** | **str** | | + +## Example + +```python +from openapi_client.models.aicusapico_qjco_kzz_zfi86_items_inner import AicusapicoQjcoKzzZFI86ItemsInner + +# TODO update the JSON string below +json = "{}" +# create an instance of AicusapicoQjcoKzzZFI86ItemsInner from a JSON string +aicusapico_qjco_kzz_zfi86_items_inner_instance = AicusapicoQjcoKzzZFI86ItemsInner.from_json(json) +# print the JSON string representation of the object +print(AicusapicoQjcoKzzZFI86ItemsInner.to_json()) + +# convert the object into a dict +aicusapico_qjco_kzz_zfi86_items_inner_dict = aicusapico_qjco_kzz_zfi86_items_inner_instance.to_dict() +# create an instance of AicusapicoQjcoKzzZFI86ItemsInner from a dict +aicusapico_qjco_kzz_zfi86_items_inner_from_dict = AicusapicoQjcoKzzZFI86ItemsInner.from_dict(aicusapico_qjco_kzz_zfi86_items_inner_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/IntellapicoXeXaUMjaXtPx.md b/api_test/biz_logic/rest_api/docs/AicusapicoUy1YBXiWJ5Aq.md similarity index 51% rename from api_test/biz_logic/rest_api/docs/IntellapicoXeXaUMjaXtPx.md rename to api_test/biz_logic/rest_api/docs/AicusapicoUy1YBXiWJ5Aq.md index 9b2f52e24..8552def5a 100644 --- 
a/api_test/biz_logic/rest_api/docs/IntellapicoXeXaUMjaXtPx.md +++ b/api_test/biz_logic/rest_api/docs/AicusapicoUy1YBXiWJ5Aq.md @@ -1,4 +1,4 @@ -# IntellapicoXeXaUMjaXtPx +# AicusapicoUy1YBXiWJ5Aq ## Properties @@ -13,19 +13,19 @@ Name | Type | Description | Notes ## Example ```python -from openapi_client.models.intellapico_xe_xa_u_mja_xt_px import IntellapicoXeXaUMjaXtPx +from openapi_client.models.aicusapico_uy1_ybxi_wj5_aq import AicusapicoUy1YBXiWJ5Aq # TODO update the JSON string below json = "{}" -# create an instance of IntellapicoXeXaUMjaXtPx from a JSON string -intellapico_xe_xa_u_mja_xt_px_instance = IntellapicoXeXaUMjaXtPx.from_json(json) +# create an instance of AicusapicoUy1YBXiWJ5Aq from a JSON string +aicusapico_uy1_ybxi_wj5_aq_instance = AicusapicoUy1YBXiWJ5Aq.from_json(json) # print the JSON string representation of the object -print(IntellapicoXeXaUMjaXtPx.to_json()) +print(AicusapicoUy1YBXiWJ5Aq.to_json()) # convert the object into a dict -intellapico_xe_xa_u_mja_xt_px_dict = intellapico_xe_xa_u_mja_xt_px_instance.to_dict() -# create an instance of IntellapicoXeXaUMjaXtPx from a dict -intellapico_xe_xa_u_mja_xt_px_from_dict = IntellapicoXeXaUMjaXtPx.from_dict(intellapico_xe_xa_u_mja_xt_px_dict) +aicusapico_uy1_ybxi_wj5_aq_dict = aicusapico_uy1_ybxi_wj5_aq_instance.to_dict() +# create an instance of AicusapicoUy1YBXiWJ5Aq from a dict +aicusapico_uy1_ybxi_wj5_aq_from_dict = AicusapicoUy1YBXiWJ5Aq.from_dict(aicusapico_uy1_ybxi_wj5_aq_dict) ``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/api_test/biz_logic/rest_api/docs/Aicusapicob9jxGQ8zv1AS.md b/api_test/biz_logic/rest_api/docs/Aicusapicob9jxGQ8zv1AS.md new file mode 100644 index 000000000..e27699397 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/Aicusapicob9jxGQ8zv1AS.md @@ -0,0 +1,31 @@ +# Aicusapicob9jxGQ8zv1AS + + +## Properties + +Name | Type | Description | 
Notes +------------ | ------------- | ------------- | ------------- +**config** | [**Aicusapico2TwvXbhsTncyConfig**](Aicusapico2TwvXbhsTncyConfig.md) | | [optional] +**items** | [**List[Aicusapicob9jxGQ8zv1ASItemsInner]**](Aicusapicob9jxGQ8zv1ASItemsInner.md) | | [optional] +**count** | **int** | | [optional] + +## Example + +```python +from openapi_client.models.aicusapicob9jx_gq8zv1_as import Aicusapicob9jxGQ8zv1AS + +# TODO update the JSON string below +json = "{}" +# create an instance of Aicusapicob9jxGQ8zv1AS from a JSON string +aicusapicob9jx_gq8zv1_as_instance = Aicusapicob9jxGQ8zv1AS.from_json(json) +# print the JSON string representation of the object +print(Aicusapicob9jxGQ8zv1AS.to_json()) + +# convert the object into a dict +aicusapicob9jx_gq8zv1_as_dict = aicusapicob9jx_gq8zv1_as_instance.to_dict() +# create an instance of Aicusapicob9jxGQ8zv1AS from a dict +aicusapicob9jx_gq8zv1_as_from_dict = Aicusapicob9jxGQ8zv1AS.from_dict(aicusapicob9jx_gq8zv1_as_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/Aicusapicob9jxGQ8zv1ASItemsInner.md b/api_test/biz_logic/rest_api/docs/Aicusapicob9jxGQ8zv1ASItemsInner.md new file mode 100644 index 000000000..ab7e0b012 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/Aicusapicob9jxGQ8zv1ASItemsInner.md @@ -0,0 +1,38 @@ +# Aicusapicob9jxGQ8zv1ASItemsInner + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**execution_id** | **str** | | +**file_name** | **str** | | +**create_by** | **str** | | +**chatbot_id** | **str** | | +**create_time** | **str** | | +**execution_status** | **str** | | +**index** | **str** | | +**model** | **str** | | +**details** | **str** | | +**tag** | **str** | | + +## Example + +```python +from openapi_client.models.aicusapicob9jx_gq8zv1_as_items_inner 
import Aicusapicob9jxGQ8zv1ASItemsInner + +# TODO update the JSON string below +json = "{}" +# create an instance of Aicusapicob9jxGQ8zv1ASItemsInner from a JSON string +aicusapicob9jx_gq8zv1_as_items_inner_instance = Aicusapicob9jxGQ8zv1ASItemsInner.from_json(json) +# print the JSON string representation of the object +print(Aicusapicob9jxGQ8zv1ASItemsInner.to_json()) + +# convert the object into a dict +aicusapicob9jx_gq8zv1_as_items_inner_dict = aicusapicob9jx_gq8zv1_as_items_inner_instance.to_dict() +# create an instance of Aicusapicob9jxGQ8zv1ASItemsInner from a dict +aicusapicob9jx_gq8zv1_as_items_inner_from_dict = Aicusapicob9jxGQ8zv1ASItemsInner.from_dict(aicusapicob9jx_gq8zv1_as_items_inner_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/AicusapicobMN2pLK9AvE8.md b/api_test/biz_logic/rest_api/docs/AicusapicobMN2pLK9AvE8.md new file mode 100644 index 000000000..b7595bf4b --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/AicusapicobMN2pLK9AvE8.md @@ -0,0 +1,32 @@ +# AicusapicobMN2pLK9AvE8 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**model_name** | **str** | | +**chatbot_id** | **str** | | +**model_id** | **str** | | +**index** | [**AicusapicobMN2pLK9AvE8Index**](AicusapicobMN2pLK9AvE8Index.md) | | + +## Example + +```python +from openapi_client.models.aicusapicob_mn2p_lk9_av_e8 import AicusapicobMN2pLK9AvE8 + +# TODO update the JSON string below +json = "{}" +# create an instance of AicusapicobMN2pLK9AvE8 from a JSON string +aicusapicob_mn2p_lk9_av_e8_instance = AicusapicobMN2pLK9AvE8.from_json(json) +# print the JSON string representation of the object +print(AicusapicobMN2pLK9AvE8.to_json()) + +# convert the object into a dict +aicusapicob_mn2p_lk9_av_e8_dict = aicusapicob_mn2p_lk9_av_e8_instance.to_dict() 
+# create an instance of AicusapicobMN2pLK9AvE8 from a dict +aicusapicob_mn2p_lk9_av_e8_from_dict = AicusapicobMN2pLK9AvE8.from_dict(aicusapicob_mn2p_lk9_av_e8_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/AicusapicobMN2pLK9AvE8Index.md b/api_test/biz_logic/rest_api/docs/AicusapicobMN2pLK9AvE8Index.md new file mode 100644 index 000000000..5a9ddb6ce --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/AicusapicobMN2pLK9AvE8Index.md @@ -0,0 +1,31 @@ +# AicusapicobMN2pLK9AvE8Index + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**qq** | **str** | | +**qd** | **str** | | +**intention** | **str** | | + +## Example + +```python +from openapi_client.models.aicusapicob_mn2p_lk9_av_e8_index import AicusapicobMN2pLK9AvE8Index + +# TODO update the JSON string below +json = "{}" +# create an instance of AicusapicobMN2pLK9AvE8Index from a JSON string +aicusapicob_mn2p_lk9_av_e8_index_instance = AicusapicobMN2pLK9AvE8Index.from_json(json) +# print the JSON string representation of the object +print(AicusapicobMN2pLK9AvE8Index.to_json()) + +# convert the object into a dict +aicusapicob_mn2p_lk9_av_e8_index_dict = aicusapicob_mn2p_lk9_av_e8_index_instance.to_dict() +# create an instance of AicusapicobMN2pLK9AvE8Index from a dict +aicusapicob_mn2p_lk9_av_e8_index_from_dict = AicusapicobMN2pLK9AvE8Index.from_dict(aicusapicob_mn2p_lk9_av_e8_index_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/Aicusapicoh5w3FRwxBjhG.md b/api_test/biz_logic/rest_api/docs/Aicusapicoh5w3FRwxBjhG.md new file mode 100644 index 000000000..01954317a --- /dev/null +++ 
b/api_test/biz_logic/rest_api/docs/Aicusapicoh5w3FRwxBjhG.md @@ -0,0 +1,32 @@ +# Aicusapicoh5w3FRwxBjhG + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**data** | **str** | | [optional] +**message** | **str** | | [optional] +**s3_prefix** | **str** | | [optional] +**s3_bucket** | **str** | | [optional] + +## Example + +```python +from openapi_client.models.aicusapicoh5w3_f_rwx_bjh_g import Aicusapicoh5w3FRwxBjhG + +# TODO update the JSON string below +json = "{}" +# create an instance of Aicusapicoh5w3FRwxBjhG from a JSON string +aicusapicoh5w3_f_rwx_bjh_g_instance = Aicusapicoh5w3FRwxBjhG.from_json(json) +# print the JSON string representation of the object +print(Aicusapicoh5w3FRwxBjhG.to_json()) + +# convert the object into a dict +aicusapicoh5w3_f_rwx_bjh_g_dict = aicusapicoh5w3_f_rwx_bjh_g_instance.to_dict() +# create an instance of Aicusapicoh5w3FRwxBjhG from a dict +aicusapicoh5w3_f_rwx_bjh_g_from_dict = Aicusapicoh5w3FRwxBjhG.from_dict(aicusapicoh5w3_f_rwx_bjh_g_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/AicusapicohQbFv37cvtQS.md b/api_test/biz_logic/rest_api/docs/AicusapicohQbFv37cvtQS.md new file mode 100644 index 000000000..76ecb8961 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/AicusapicohQbFv37cvtQS.md @@ -0,0 +1,32 @@ +# AicusapicohQbFv37cvtQS + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**chatbot_id** | **str** | | [optional] +**group_name** | **str** | | [optional] +**index_ids** | [**AicusapicohQbFv37cvtQSIndexIds**](AicusapicohQbFv37cvtQSIndexIds.md) | | [optional] +**message** | **str** | | [optional] + +## Example + +```python +from openapi_client.models.aicusapicoh_qb_fv37cvt_qs import AicusapicohQbFv37cvtQS + +# TODO 
update the JSON string below +json = "{}" +# create an instance of AicusapicohQbFv37cvtQS from a JSON string +aicusapicoh_qb_fv37cvt_qs_instance = AicusapicohQbFv37cvtQS.from_json(json) +# print the JSON string representation of the object +print(AicusapicohQbFv37cvtQS.to_json()) + +# convert the object into a dict +aicusapicoh_qb_fv37cvt_qs_dict = aicusapicoh_qb_fv37cvt_qs_instance.to_dict() +# create an instance of AicusapicohQbFv37cvtQS from a dict +aicusapicoh_qb_fv37cvt_qs_from_dict = AicusapicohQbFv37cvtQS.from_dict(aicusapicoh_qb_fv37cvt_qs_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/AicusapicohQbFv37cvtQSIndexIds.md b/api_test/biz_logic/rest_api/docs/AicusapicohQbFv37cvtQSIndexIds.md new file mode 100644 index 000000000..b6afe94e8 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/AicusapicohQbFv37cvtQSIndexIds.md @@ -0,0 +1,31 @@ +# AicusapicohQbFv37cvtQSIndexIds + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**qq** | **str** | | [optional] +**qd** | **str** | | [optional] +**intention** | **str** | | [optional] + +## Example + +```python +from openapi_client.models.aicusapicoh_qb_fv37cvt_qs_index_ids import AicusapicohQbFv37cvtQSIndexIds + +# TODO update the JSON string below +json = "{}" +# create an instance of AicusapicohQbFv37cvtQSIndexIds from a JSON string +aicusapicoh_qb_fv37cvt_qs_index_ids_instance = AicusapicohQbFv37cvtQSIndexIds.from_json(json) +# print the JSON string representation of the object +print(AicusapicohQbFv37cvtQSIndexIds.to_json()) + +# convert the object into a dict +aicusapicoh_qb_fv37cvt_qs_index_ids_dict = aicusapicoh_qb_fv37cvt_qs_index_ids_instance.to_dict() +# create an instance of AicusapicohQbFv37cvtQSIndexIds from a dict +aicusapicoh_qb_fv37cvt_qs_index_ids_from_dict = 
AicusapicohQbFv37cvtQSIndexIds.from_dict(aicusapicoh_qb_fv37cvt_qs_index_ids_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoiXUam8N8Dh8l.md b/api_test/biz_logic/rest_api/docs/AicusapicoiXUam8N8Dh8l.md new file mode 100644 index 000000000..62d46bd39 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/AicusapicoiXUam8N8Dh8l.md @@ -0,0 +1,32 @@ +# AicusapicoiXUam8N8Dh8l + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**chatbot_ids** | **List[str]** | | [optional] +**config** | [**Aicusapico2TwvXbhsTncyConfig**](Aicusapico2TwvXbhsTncyConfig.md) | | [optional] +**items** | [**List[AicusapicoiXUam8N8Dh8lItemsInner]**](AicusapicoiXUam8N8Dh8lItemsInner.md) | | [optional] +**count** | **int** | | [optional] + +## Example + +```python +from openapi_client.models.aicusapicoi_x_uam8_n8_dh8l import AicusapicoiXUam8N8Dh8l + +# TODO update the JSON string below +json = "{}" +# create an instance of AicusapicoiXUam8N8Dh8l from a JSON string +aicusapicoi_x_uam8_n8_dh8l_instance = AicusapicoiXUam8N8Dh8l.from_json(json) +# print the JSON string representation of the object +print(AicusapicoiXUam8N8Dh8l.to_json()) + +# convert the object into a dict +aicusapicoi_x_uam8_n8_dh8l_dict = aicusapicoi_x_uam8_n8_dh8l_instance.to_dict() +# create an instance of AicusapicoiXUam8N8Dh8l from a dict +aicusapicoi_x_uam8_n8_dh8l_from_dict = AicusapicoiXUam8N8Dh8l.from_dict(aicusapicoi_x_uam8_n8_dh8l_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoiXUam8N8Dh8lItemsInner.md b/api_test/biz_logic/rest_api/docs/AicusapicoiXUam8N8Dh8lItemsInner.md new file mode 100644 
index 000000000..296417a0c --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/AicusapicoiXUam8N8Dh8lItemsInner.md @@ -0,0 +1,32 @@ +# AicusapicoiXUam8N8Dh8lItemsInner + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**chatbot_id** | **str** | | +**model_name** | **str** | | +**last_modified_time** | **str** | | +**model_id** | **str** | | + +## Example + +```python +from openapi_client.models.aicusapicoi_x_uam8_n8_dh8l_items_inner import AicusapicoiXUam8N8Dh8lItemsInner + +# TODO update the JSON string below +json = "{}" +# create an instance of AicusapicoiXUam8N8Dh8lItemsInner from a JSON string +aicusapicoi_x_uam8_n8_dh8l_items_inner_instance = AicusapicoiXUam8N8Dh8lItemsInner.from_json(json) +# print the JSON string representation of the object +print(AicusapicoiXUam8N8Dh8lItemsInner.to_json()) + +# convert the object into a dict +aicusapicoi_x_uam8_n8_dh8l_items_inner_dict = aicusapicoi_x_uam8_n8_dh8l_items_inner_instance.to_dict() +# create an instance of AicusapicoiXUam8N8Dh8lItemsInner from a dict +aicusapicoi_x_uam8_n8_dh8l_items_inner_from_dict = AicusapicoiXUam8N8Dh8lItemsInner.from_dict(aicusapicoi_x_uam8_n8_dh8l_items_inner_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/Aicusapicoqew7t5vTA2ak.md b/api_test/biz_logic/rest_api/docs/Aicusapicoqew7t5vTA2ak.md new file mode 100644 index 000000000..70dc38633 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/Aicusapicoqew7t5vTA2ak.md @@ -0,0 +1,30 @@ +# Aicusapicoqew7t5vTA2ak + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**data** | **List[str]** | | [optional] +**message** | **str** | | [optional] + +## Example + +```python +from openapi_client.models.aicusapicoqew7t5v_ta2ak import 
Aicusapicoqew7t5vTA2ak + +# TODO update the JSON string below +json = "{}" +# create an instance of Aicusapicoqew7t5vTA2ak from a JSON string +aicusapicoqew7t5v_ta2ak_instance = Aicusapicoqew7t5vTA2ak.from_json(json) +# print the JSON string representation of the object +print(Aicusapicoqew7t5vTA2ak.to_json()) + +# convert the object into a dict +aicusapicoqew7t5v_ta2ak_dict = aicusapicoqew7t5v_ta2ak_instance.to_dict() +# create an instance of Aicusapicoqew7t5vTA2ak from a dict +aicusapicoqew7t5v_ta2ak_from_dict = Aicusapicoqew7t5vTA2ak.from_dict(aicusapicoqew7t5v_ta2ak_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/Aicusapicor1Kt5C2mLnkm.md b/api_test/biz_logic/rest_api/docs/Aicusapicor1Kt5C2mLnkm.md new file mode 100644 index 000000000..1b9828fa2 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/Aicusapicor1Kt5C2mLnkm.md @@ -0,0 +1,29 @@ +# Aicusapicor1Kt5C2mLnkm + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**execution_id** | **List[str]** | | + +## Example + +```python +from openapi_client.models.aicusapicor1_kt5_c2m_lnkm import Aicusapicor1Kt5C2mLnkm + +# TODO update the JSON string below +json = "{}" +# create an instance of Aicusapicor1Kt5C2mLnkm from a JSON string +aicusapicor1_kt5_c2m_lnkm_instance = Aicusapicor1Kt5C2mLnkm.from_json(json) +# print the JSON string representation of the object +print(Aicusapicor1Kt5C2mLnkm.to_json()) + +# convert the object into a dict +aicusapicor1_kt5_c2m_lnkm_dict = aicusapicor1_kt5_c2m_lnkm_instance.to_dict() +# create an instance of Aicusapicor1Kt5C2mLnkm from a dict +aicusapicor1_kt5_c2m_lnkm_from_dict = Aicusapicor1Kt5C2mLnkm.from_dict(aicusapicor1_kt5_c2m_lnkm_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/IntellapicoNK9oLf1K1uex.md b/api_test/biz_logic/rest_api/docs/AicusapicoseOArXMRpSNs.md similarity index 54% rename from api_test/biz_logic/rest_api/docs/IntellapicoNK9oLf1K1uex.md rename to api_test/biz_logic/rest_api/docs/AicusapicoseOArXMRpSNs.md index 7967684c0..0484ad594 100644 --- a/api_test/biz_logic/rest_api/docs/IntellapicoNK9oLf1K1uex.md +++ b/api_test/biz_logic/rest_api/docs/AicusapicoseOArXMRpSNs.md @@ -1,4 +1,4 @@ -# IntellapicoNK9oLf1K1uex +# AicusapicoseOArXMRpSNs ## Properties @@ -16,19 +16,19 @@ Name | Type | Description | Notes ## Example ```python -from openapi_client.models.intellapico_nk9o_lf1_k1uex import IntellapicoNK9oLf1K1uex +from openapi_client.models.aicusapicose_oar_xmrp_sns import AicusapicoseOArXMRpSNs # TODO update the JSON string below json = "{}" -# create an instance of IntellapicoNK9oLf1K1uex from a JSON string -intellapico_nk9o_lf1_k1uex_instance = IntellapicoNK9oLf1K1uex.from_json(json) +# create an instance of AicusapicoseOArXMRpSNs from a JSON string +aicusapicose_oar_xmrp_sns_instance = AicusapicoseOArXMRpSNs.from_json(json) # print the JSON string representation of the object -print(IntellapicoNK9oLf1K1uex.to_json()) +print(AicusapicoseOArXMRpSNs.to_json()) # convert the object into a dict -intellapico_nk9o_lf1_k1uex_dict = intellapico_nk9o_lf1_k1uex_instance.to_dict() -# create an instance of IntellapicoNK9oLf1K1uex from a dict -intellapico_nk9o_lf1_k1uex_from_dict = IntellapicoNK9oLf1K1uex.from_dict(intellapico_nk9o_lf1_k1uex_dict) +aicusapicose_oar_xmrp_sns_dict = aicusapicose_oar_xmrp_sns_instance.to_dict() +# create an instance of AicusapicoseOArXMRpSNs from a dict +aicusapicose_oar_xmrp_sns_from_dict = AicusapicoseOArXMRpSNs.from_dict(aicusapicose_oar_xmrp_sns_dict) ``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/api_test/biz_logic/rest_api/docs/DefaultApi.md b/api_test/biz_logic/rest_api/docs/DefaultApi.md index c7e9dcd68..99557ce54 100644 --- a/api_test/biz_logic/rest_api/docs/DefaultApi.md +++ b/api_test/biz_logic/rest_api/docs/DefaultApi.md @@ -1,6 +1,6 @@ # openapi_client.DefaultApi -All URIs are relative to *https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod* +All URIs are relative to *https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod* Method | HTTP request | Description ------------- | ------------- | ------------- @@ -16,9 +16,28 @@ Method | HTTP request | Description [**chatbot_management_chatbots_get**](DefaultApi.md#chatbot_management_chatbots_get) | **GET** /chatbot-management/chatbots | [**chatbot_management_chatbots_options**](DefaultApi.md#chatbot_management_chatbots_options) | **OPTIONS** /chatbot-management/chatbots | [**chatbot_management_chatbots_post**](DefaultApi.md#chatbot_management_chatbots_post) | **POST** /chatbot-management/chatbots | +[**chatbot_management_check_chatbot_options**](DefaultApi.md#chatbot_management_check_chatbot_options) | **OPTIONS** /chatbot-management/check-chatbot | +[**chatbot_management_check_chatbot_post**](DefaultApi.md#chatbot_management_check_chatbot_post) | **POST** /chatbot-management/check-chatbot | +[**chatbot_management_check_default_chatbot_get**](DefaultApi.md#chatbot_management_check_default_chatbot_get) | **GET** /chatbot-management/check-default-chatbot | +[**chatbot_management_check_default_chatbot_options**](DefaultApi.md#chatbot_management_check_default_chatbot_options) | **OPTIONS** /chatbot-management/check-default-chatbot | +[**chatbot_management_embeddings_get**](DefaultApi.md#chatbot_management_embeddings_get) | **GET** /chatbot-management/embeddings | +[**chatbot_management_embeddings_options**](DefaultApi.md#chatbot_management_embeddings_options) | **OPTIONS** 
/chatbot-management/embeddings | [**chatbot_management_options**](DefaultApi.md#chatbot_management_options) | **OPTIONS** /chatbot-management | [**extract_options**](DefaultApi.md#extract_options) | **OPTIONS** /extract | [**extract_post**](DefaultApi.md#extract_post) | **POST** /extract | +[**intention_download_template_get**](DefaultApi.md#intention_download_template_get) | **GET** /intention/download-template | +[**intention_download_template_options**](DefaultApi.md#intention_download_template_options) | **OPTIONS** /intention/download-template | +[**intention_execution_presigned_url_options**](DefaultApi.md#intention_execution_presigned_url_options) | **OPTIONS** /intention/execution-presigned-url | +[**intention_execution_presigned_url_post**](DefaultApi.md#intention_execution_presigned_url_post) | **POST** /intention/execution-presigned-url | +[**intention_executions_delete**](DefaultApi.md#intention_executions_delete) | **DELETE** /intention/executions | +[**intention_executions_execution_id_get**](DefaultApi.md#intention_executions_execution_id_get) | **GET** /intention/executions/{executionId} | +[**intention_executions_execution_id_options**](DefaultApi.md#intention_executions_execution_id_options) | **OPTIONS** /intention/executions/{executionId} | +[**intention_executions_get**](DefaultApi.md#intention_executions_get) | **GET** /intention/executions | +[**intention_executions_options**](DefaultApi.md#intention_executions_options) | **OPTIONS** /intention/executions | +[**intention_executions_post**](DefaultApi.md#intention_executions_post) | **POST** /intention/executions | +[**intention_index_used_scan_options**](DefaultApi.md#intention_index_used_scan_options) | **OPTIONS** /intention/index-used-scan | +[**intention_index_used_scan_post**](DefaultApi.md#intention_index_used_scan_post) | **POST** /intention/index-used-scan | +[**intention_options**](DefaultApi.md#intention_options) | **OPTIONS** /intention | 
[**knowledge_base_executions_delete**](DefaultApi.md#knowledge_base_executions_delete) | **DELETE** /knowledge-base/executions | [**knowledge_base_executions_execution_id_get**](DefaultApi.md#knowledge_base_executions_execution_id_get) | **GET** /knowledge-base/executions/{executionId} | [**knowledge_base_executions_execution_id_options**](DefaultApi.md#knowledge_base_executions_execution_id_options) | **OPTIONS** /knowledge-base/executions/{executionId} | @@ -48,17 +67,17 @@ Method | HTTP request | Description ### Example -* Api Key Authentication (intelliagentapiconstructApiAuthorizerFB94A0DF): +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): ```python import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -66,11 +85,11 @@ configuration = openapi_client.Configuration( # Examples for each auth method are provided below, use the example that # satisfies your auth use case. -# Configure API key authorization: intelliagentapiconstructApiAuthorizerFB94A0DF -configuration.api_key['intelliagentapiconstructApiAuthorizerFB94A0DF'] = os.environ["API_KEY"] +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed -# configuration.api_key_prefix['intelliagentapiconstructApiAuthorizerFB94A0DF'] = 'Bearer' +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' # Enter a context with an instance of the API client with openapi_client.ApiClient(configuration) as api_client: @@ -97,7 +116,7 @@ This endpoint does not need any parameter. ### Authorization -[intelliagentapiconstructApiAuthorizerFB94A0DF](../README.md#intelliagentapiconstructApiAuthorizerFB94A0DF) +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) ### HTTP request headers @@ -127,10 +146,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) @@ -179,17 +198,17 @@ No authorization required ### Example -* Api Key Authentication (intelliagentapiconstructApiAuthorizerFB94A0DF): +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): ```python import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -197,11 +216,11 @@ configuration = openapi_client.Configuration( # Examples for each auth method are provided below, use the example that # satisfies your auth use case. -# Configure API key authorization: intelliagentapiconstructApiAuthorizerFB94A0DF -configuration.api_key['intelliagentapiconstructApiAuthorizerFB94A0DF'] = os.environ["API_KEY"] +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed -# configuration.api_key_prefix['intelliagentapiconstructApiAuthorizerFB94A0DF'] = 'Bearer' +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' # Enter a context with an instance of the API client with openapi_client.ApiClient(configuration) as api_client: @@ -228,7 +247,7 @@ This endpoint does not need any parameter. ### Authorization -[intelliagentapiconstructApiAuthorizerFB94A0DF](../README.md#intelliagentapiconstructApiAuthorizerFB94A0DF) +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) ### HTTP request headers @@ -252,17 +271,17 @@ This endpoint does not need any parameter. 
### Example -* Api Key Authentication (intelliagentapiconstructApiAuthorizerFB94A0DF): +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): ```python import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -270,11 +289,11 @@ configuration = openapi_client.Configuration( # Examples for each auth method are provided below, use the example that # satisfies your auth use case. -# Configure API key authorization: intelliagentapiconstructApiAuthorizerFB94A0DF -configuration.api_key['intelliagentapiconstructApiAuthorizerFB94A0DF'] = os.environ["API_KEY"] +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed -# configuration.api_key_prefix['intelliagentapiconstructApiAuthorizerFB94A0DF'] = 'Bearer' +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' # Enter a context with an instance of the API client with openapi_client.ApiClient(configuration) as api_client: @@ -301,7 +320,7 @@ This endpoint does not need any parameter. 
### Authorization -[intelliagentapiconstructApiAuthorizerFB94A0DF](../README.md#intelliagentapiconstructApiAuthorizerFB94A0DF) +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) ### HTTP request headers @@ -331,10 +350,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) @@ -389,10 +408,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) @@ -441,17 +460,17 @@ No authorization required ### Example -* Api Key Authentication (intelliagentapiconstructApiAuthorizerFB94A0DF): +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): ```python import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -459,11 +478,11 @@ configuration = openapi_client.Configuration( # Examples for each auth method are provided below, use the example that # satisfies your auth use case. -# Configure API key authorization: intelliagentapiconstructApiAuthorizerFB94A0DF -configuration.api_key['intelliagentapiconstructApiAuthorizerFB94A0DF'] = os.environ["API_KEY"] +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed -# configuration.api_key_prefix['intelliagentapiconstructApiAuthorizerFB94A0DF'] = 'Bearer' +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' # Enter a context with an instance of the API client with openapi_client.ApiClient(configuration) as api_client: @@ -490,7 +509,7 @@ This endpoint does not need any parameter. ### Authorization -[intelliagentapiconstructApiAuthorizerFB94A0DF](../README.md#intelliagentapiconstructApiAuthorizerFB94A0DF) +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) ### HTTP request headers @@ -514,17 +533,17 @@ This endpoint does not need any parameter. ### Example -* Api Key Authentication (intelliagentapiconstructApiAuthorizerFB94A0DF): +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): ```python import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -532,11 +551,11 @@ configuration = openapi_client.Configuration( # Examples for each auth method are provided below, use the example that # satisfies your auth use case. 
-# Configure API key authorization: intelliagentapiconstructApiAuthorizerFB94A0DF -configuration.api_key['intelliagentapiconstructApiAuthorizerFB94A0DF'] = os.environ["API_KEY"] +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed -# configuration.api_key_prefix['intelliagentapiconstructApiAuthorizerFB94A0DF'] = 'Bearer' +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' # Enter a context with an instance of the API client with openapi_client.ApiClient(configuration) as api_client: @@ -563,7 +582,7 @@ This endpoint does not need any parameter. ### Authorization -[intelliagentapiconstructApiAuthorizerFB94A0DF](../README.md#intelliagentapiconstructApiAuthorizerFB94A0DF) +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) ### HTTP request headers @@ -593,10 +612,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) @@ -639,23 +658,24 @@ No authorization required [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **chatbot_management_chatbots_get** -> object chatbot_management_chatbots_get() +> AicusapicoiXUam8N8Dh8l chatbot_management_chatbots_get(page_size=page_size, max_items=max_items) ### Example -* Api Key Authentication (intelliagentapiconstructApiAuthorizerFB94A0DF): +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): ```python import openapi_client +from openapi_client.models.aicusapicoi_x_uam8_n8_dh8l import AicusapicoiXUam8N8Dh8l from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -663,19 +683,21 @@ configuration = openapi_client.Configuration( # Examples for each auth method are provided below, use the example that # satisfies your auth use case. 
-# Configure API key authorization: intelliagentapiconstructApiAuthorizerFB94A0DF -configuration.api_key['intelliagentapiconstructApiAuthorizerFB94A0DF'] = os.environ["API_KEY"] +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed -# configuration.api_key_prefix['intelliagentapiconstructApiAuthorizerFB94A0DF'] = 'Bearer' +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' # Enter a context with an instance of the API client with openapi_client.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = openapi_client.DefaultApi(api_client) + page_size = 'page_size_example' # str | (optional) + max_items = 'max_items_example' # str | (optional) try: - api_response = api_instance.chatbot_management_chatbots_get() + api_response = api_instance.chatbot_management_chatbots_get(page_size=page_size, max_items=max_items) print("The response of DefaultApi->chatbot_management_chatbots_get:\n") pprint(api_response) except Exception as e: @@ -686,15 +708,19 @@ with openapi_client.ApiClient(configuration) as api_client: ### Parameters -This endpoint does not need any parameter. 
+ +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **page_size** | **str**| | [optional] + **max_items** | **str**| | [optional] ### Return type -**object** +[**AicusapicoiXUam8N8Dh8l**](AicusapicoiXUam8N8Dh8l.md) ### Authorization -[intelliagentapiconstructApiAuthorizerFB94A0DF](../README.md#intelliagentapiconstructApiAuthorizerFB94A0DF) +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) ### HTTP request headers @@ -724,10 +750,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) @@ -770,23 +796,25 @@ No authorization required [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **chatbot_management_chatbots_post** -> object chatbot_management_chatbots_post() +> AicusapicohQbFv37cvtQS chatbot_management_chatbots_post(aicusapicob_mn2p_lk9_av_e8) ### Example -* Api Key Authentication (intelliagentapiconstructApiAuthorizerFB94A0DF): +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): ```python import openapi_client +from openapi_client.models.aicusapicob_mn2p_lk9_av_e8 import AicusapicobMN2pLK9AvE8 +from openapi_client.models.aicusapicoh_qb_fv37cvt_qs import AicusapicohQbFv37cvtQS from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is 
optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -794,19 +822,20 @@ configuration = openapi_client.Configuration( # Examples for each auth method are provided below, use the example that # satisfies your auth use case. -# Configure API key authorization: intelliagentapiconstructApiAuthorizerFB94A0DF -configuration.api_key['intelliagentapiconstructApiAuthorizerFB94A0DF'] = os.environ["API_KEY"] +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed -# configuration.api_key_prefix['intelliagentapiconstructApiAuthorizerFB94A0DF'] = 'Bearer' +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' # Enter a context with an instance of the API client with openapi_client.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = openapi_client.DefaultApi(api_client) + aicusapicob_mn2p_lk9_av_e8 = openapi_client.AicusapicobMN2pLK9AvE8() # AicusapicobMN2pLK9AvE8 | try: - api_response = api_instance.chatbot_management_chatbots_post() + api_response = api_instance.chatbot_management_chatbots_post(aicusapicob_mn2p_lk9_av_e8) print("The response of DefaultApi->chatbot_management_chatbots_post:\n") pprint(api_response) except Exception as e: @@ -817,19 +846,22 @@ with openapi_client.ApiClient(configuration) as api_client: ### Parameters -This endpoint does not need any parameter. + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **aicusapicob_mn2p_lk9_av_e8** | [**AicusapicobMN2pLK9AvE8**](AicusapicobMN2pLK9AvE8.md)| | ### Return type -**object** +[**AicusapicohQbFv37cvtQS**](AicusapicohQbFv37cvtQS.md) ### Authorization -[intelliagentapiconstructApiAuthorizerFB94A0DF](../README.md#intelliagentapiconstructApiAuthorizerFB94A0DF) +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) ### HTTP request headers - - **Content-Type**: Not defined + - **Content-Type**: application/json - **Accept**: application/json ### HTTP response details @@ -842,8 +874,8 @@ This endpoint does not need any parameter. 
[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) -# **chatbot_management_options** -> chatbot_management_options() +# **chatbot_management_check_chatbot_options** +> chatbot_management_check_chatbot_options() @@ -855,10 +887,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) @@ -868,9 +900,9 @@ with openapi_client.ApiClient(configuration) as api_client: api_instance = openapi_client.DefaultApi(api_client) try: - api_instance.chatbot_management_options() + api_instance.chatbot_management_check_chatbot_options() except Exception as e: - print("Exception when calling DefaultApi->chatbot_management_options: %s\n" % e) + print("Exception when calling DefaultApi->chatbot_management_check_chatbot_options: %s\n" % e) ``` @@ -900,25 +932,36 @@ No authorization required [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) -# **extract_options** -> extract_options() +# **chatbot_management_check_chatbot_post** +> object chatbot_management_check_chatbot_post() ### Example +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): ```python import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to 
https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] + +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' # Enter a context with an instance of the API client with openapi_client.ApiClient(configuration) as api_client: @@ -926,9 +969,144 @@ with openapi_client.ApiClient(configuration) as api_client: api_instance = openapi_client.DefaultApi(api_client) try: - api_instance.extract_options() + api_response = api_instance.chatbot_management_check_chatbot_post() + print("The response of DefaultApi->chatbot_management_check_chatbot_post:\n") + pprint(api_response) except Exception as e: - print("Exception when calling DefaultApi->extract_options: %s\n" % e) + print("Exception when calling DefaultApi->chatbot_management_check_chatbot_post: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. 
+ +### Return type + +**object** + +### Authorization + +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**400** | 400 response | - | +**500** | 500 response | - | +**200** | 200 response | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **chatbot_management_check_default_chatbot_get** +> object chatbot_management_check_default_chatbot_get() + + + +### Example + +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): + +```python +import openapi_client +from openapi_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# See configuration.py for a list of all supported configuration parameters. +configuration = openapi_client.Configuration( + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' + +# Enter a context with an instance of the API client +with openapi_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = openapi_client.DefaultApi(api_client) + + try: + api_response = api_instance.chatbot_management_check_default_chatbot_get() + print("The response of DefaultApi->chatbot_management_check_default_chatbot_get:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling DefaultApi->chatbot_management_check_default_chatbot_get: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +**object** + +### Authorization + +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**400** | 400 response | - | +**500** | 500 response | - | +**200** | 200 response | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **chatbot_management_check_default_chatbot_options** +> chatbot_management_check_default_chatbot_options() + + + +### Example + + +```python +import openapi_client +from openapi_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# See configuration.py for a list of all supported configuration parameters. 
+configuration = openapi_client.Configuration( + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" +) + + +# Enter a context with an instance of the API client +with openapi_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = openapi_client.DefaultApi(api_client) + + try: + api_instance.chatbot_management_check_default_chatbot_options() + except Exception as e: + print("Exception when calling DefaultApi->chatbot_management_check_default_chatbot_options: %s\n" % e) ``` @@ -958,24 +1136,24 @@ No authorization required [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) -# **extract_post** -> object extract_post() +# **chatbot_management_embeddings_get** +> object chatbot_management_embeddings_get() ### Example -* Api Key Authentication (intelliagentapiconstructApiAuthorizerFB94A0DF): +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): ```python import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -983,11 +1161,11 @@ configuration = openapi_client.Configuration( # Examples for each auth method are provided below, use the example that # satisfies your auth use case. 
-# Configure API key authorization: intelliagentapiconstructApiAuthorizerFB94A0DF -configuration.api_key['intelliagentapiconstructApiAuthorizerFB94A0DF'] = os.environ["API_KEY"] +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed -# configuration.api_key_prefix['intelliagentapiconstructApiAuthorizerFB94A0DF'] = 'Bearer' +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' # Enter a context with an instance of the API client with openapi_client.ApiClient(configuration) as api_client: @@ -995,11 +1173,11 @@ with openapi_client.ApiClient(configuration) as api_client: api_instance = openapi_client.DefaultApi(api_client) try: - api_response = api_instance.extract_post() - print("The response of DefaultApi->extract_post:\n") + api_response = api_instance.chatbot_management_embeddings_get() + print("The response of DefaultApi->chatbot_management_embeddings_get:\n") pprint(api_response) except Exception as e: - print("Exception when calling DefaultApi->extract_post: %s\n" % e) + print("Exception when calling DefaultApi->chatbot_management_embeddings_get: %s\n" % e) ``` @@ -1014,7 +1192,7 @@ This endpoint does not need any parameter. ### Authorization -[intelliagentapiconstructApiAuthorizerFB94A0DF](../README.md#intelliagentapiconstructApiAuthorizerFB94A0DF) +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) ### HTTP request headers @@ -1031,26 +1209,1162 @@ This endpoint does not need any parameter. 
[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) +# **chatbot_management_embeddings_options** +> chatbot_management_embeddings_options() + + + +### Example + + +```python +import openapi_client +from openapi_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# See configuration.py for a list of all supported configuration parameters. +configuration = openapi_client.Configuration( + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" +) + + +# Enter a context with an instance of the API client +with openapi_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = openapi_client.DefaultApi(api_client) + + try: + api_instance.chatbot_management_embeddings_options() + except Exception as e: + print("Exception when calling DefaultApi->chatbot_management_embeddings_options: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**204** | 204 response | * Access-Control-Allow-Origin -
* Access-Control-Allow-Methods -
* Access-Control-Allow-Credentials -
* Access-Control-Allow-Headers -
| + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **chatbot_management_options** +> chatbot_management_options() + + + +### Example + + +```python +import openapi_client +from openapi_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# See configuration.py for a list of all supported configuration parameters. +configuration = openapi_client.Configuration( + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" +) + + +# Enter a context with an instance of the API client +with openapi_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = openapi_client.DefaultApi(api_client) + + try: + api_instance.chatbot_management_options() + except Exception as e: + print("Exception when calling DefaultApi->chatbot_management_options: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**204** | 204 response | * Access-Control-Allow-Origin -
* Access-Control-Allow-Methods -
* Access-Control-Allow-Credentials -
* Access-Control-Allow-Headers -
| + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **extract_options** +> extract_options() + + + +### Example + + +```python +import openapi_client +from openapi_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# See configuration.py for a list of all supported configuration parameters. +configuration = openapi_client.Configuration( + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" +) + + +# Enter a context with an instance of the API client +with openapi_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = openapi_client.DefaultApi(api_client) + + try: + api_instance.extract_options() + except Exception as e: + print("Exception when calling DefaultApi->extract_options: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**204** | 204 response | * Access-Control-Allow-Origin -
* Access-Control-Allow-Methods -
* Access-Control-Allow-Credentials -
* Access-Control-Allow-Headers -
| + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **extract_post** +> object extract_post() + + + +### Example + +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): + +```python +import openapi_client +from openapi_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# See configuration.py for a list of all supported configuration parameters. +configuration = openapi_client.Configuration( + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] + +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' + +# Enter a context with an instance of the API client +with openapi_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = openapi_client.DefaultApi(api_client) + + try: + api_response = api_instance.extract_post() + print("The response of DefaultApi->extract_post:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling DefaultApi->extract_post: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. 
+ +### Return type + +**object** + +### Authorization + +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**400** | 400 response | - | +**500** | 500 response | - | +**200** | 200 response | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **intention_download_template_get** +> object intention_download_template_get() + + + +### Example + +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): + +```python +import openapi_client +from openapi_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# See configuration.py for a list of all supported configuration parameters. +configuration = openapi_client.Configuration( + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' + +# Enter a context with an instance of the API client +with openapi_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = openapi_client.DefaultApi(api_client) + + try: + api_response = api_instance.intention_download_template_get() + print("The response of DefaultApi->intention_download_template_get:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling DefaultApi->intention_download_template_get: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +**object** + +### Authorization + +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**400** | 400 response | - | +**500** | 500 response | - | +**200** | 200 response | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **intention_download_template_options** +> intention_download_template_options() + + + +### Example + + +```python +import openapi_client +from openapi_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# See configuration.py for a list of all supported configuration parameters. 
+configuration = openapi_client.Configuration( + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" +) + + +# Enter a context with an instance of the API client +with openapi_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = openapi_client.DefaultApi(api_client) + + try: + api_instance.intention_download_template_options() + except Exception as e: + print("Exception when calling DefaultApi->intention_download_template_options: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**204** | 204 response | * Access-Control-Allow-Origin -
* Access-Control-Allow-Methods -
* Access-Control-Allow-Credentials -
* Access-Control-Allow-Headers -
| + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **intention_execution_presigned_url_options** +> intention_execution_presigned_url_options() + + + +### Example + + +```python +import openapi_client +from openapi_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# See configuration.py for a list of all supported configuration parameters. +configuration = openapi_client.Configuration( + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" +) + + +# Enter a context with an instance of the API client +with openapi_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = openapi_client.DefaultApi(api_client) + + try: + api_instance.intention_execution_presigned_url_options() + except Exception as e: + print("Exception when calling DefaultApi->intention_execution_presigned_url_options: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**204** | 204 response | * Access-Control-Allow-Origin -
* Access-Control-Allow-Methods -
* Access-Control-Allow-Credentials -
* Access-Control-Allow-Headers -
| + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **intention_execution_presigned_url_post** +> AicusapicoUy1YBXiWJ5Aq intention_execution_presigned_url_post(aicusapico_cyd129_m65y_kv) + + + +### Example + +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): + +```python +import openapi_client +from openapi_client.models.aicusapico_cyd129_m65y_kv import AicusapicoCyd129M65yKV +from openapi_client.models.aicusapico_uy1_ybxi_wj5_aq import AicusapicoUy1YBXiWJ5Aq +from openapi_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# See configuration.py for a list of all supported configuration parameters. +configuration = openapi_client.Configuration( + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' + +# Enter a context with an instance of the API client +with openapi_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = openapi_client.DefaultApi(api_client) + aicusapico_cyd129_m65y_kv = openapi_client.AicusapicoCyd129M65yKV() # AicusapicoCyd129M65yKV | + + try: + api_response = api_instance.intention_execution_presigned_url_post(aicusapico_cyd129_m65y_kv) + print("The response of DefaultApi->intention_execution_presigned_url_post:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling DefaultApi->intention_execution_presigned_url_post: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **aicusapico_cyd129_m65y_kv** | [**AicusapicoCyd129M65yKV**](AicusapicoCyd129M65yKV.md)| | + +### Return type + +[**AicusapicoUy1YBXiWJ5Aq**](AicusapicoUy1YBXiWJ5Aq.md) + +### Authorization + +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**400** | 400 response | - | +**500** | 500 response | - | +**200** | 200 response | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **intention_executions_delete** +> object intention_executions_delete() + + + +### Example + +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): + +```python +import openapi_client +from openapi_client.rest import ApiException +from pprint import 
pprint + +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# See configuration.py for a list of all supported configuration parameters. +configuration = openapi_client.Configuration( + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] + +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' + +# Enter a context with an instance of the API client +with openapi_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = openapi_client.DefaultApi(api_client) + + try: + api_response = api_instance.intention_executions_delete() + print("The response of DefaultApi->intention_executions_delete:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling DefaultApi->intention_executions_delete: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. 
+ +### Return type + +**object** + +### Authorization + +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**400** | 400 response | - | +**500** | 500 response | - | +**200** | 200 response | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **intention_executions_execution_id_get** +> Aicusapico35klzY80ikPh intention_executions_execution_id_get(intention_id, execution_id) + + + +### Example + +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): + +```python +import openapi_client +from openapi_client.models.aicusapico35klz_y80ik_ph import Aicusapico35klzY80ikPh +from openapi_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# See configuration.py for a list of all supported configuration parameters. +configuration = openapi_client.Configuration( + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' + +# Enter a context with an instance of the API client +with openapi_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = openapi_client.DefaultApi(api_client) + intention_id = 'intention_id_example' # str | + execution_id = 'execution_id_example' # str | + + try: + api_response = api_instance.intention_executions_execution_id_get(intention_id, execution_id) + print("The response of DefaultApi->intention_executions_execution_id_get:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling DefaultApi->intention_executions_execution_id_get: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **intention_id** | **str**| | + **execution_id** | **str**| | + +### Return type + +[**Aicusapico35klzY80ikPh**](Aicusapico35klzY80ikPh.md) + +### Authorization + +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**400** | 400 response | - | +**500** | 500 response | - | +**200** | 200 response | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **intention_executions_execution_id_options** +> intention_executions_execution_id_options(execution_id) + + + +### Example + + +```python +import openapi_client +from openapi_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to 
https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# See configuration.py for a list of all supported configuration parameters. +configuration = openapi_client.Configuration( + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" +) + + +# Enter a context with an instance of the API client +with openapi_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = openapi_client.DefaultApi(api_client) + execution_id = 'execution_id_example' # str | + + try: + api_instance.intention_executions_execution_id_options(execution_id) + except Exception as e: + print("Exception when calling DefaultApi->intention_executions_execution_id_options: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **execution_id** | **str**| | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**204** | 204 response | * Access-Control-Allow-Origin -
* Access-Control-Allow-Methods -
* Access-Control-Allow-Credentials -
* Access-Control-Allow-Headers -
| + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **intention_executions_get** +> Aicusapicob9jxGQ8zv1AS intention_executions_get(page_size=page_size, max_items=max_items) + + + +### Example + +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): + +```python +import openapi_client +from openapi_client.models.aicusapicob9jx_gq8zv1_as import Aicusapicob9jxGQ8zv1AS +from openapi_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# See configuration.py for a list of all supported configuration parameters. +configuration = openapi_client.Configuration( + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' + +# Enter a context with an instance of the API client +with openapi_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = openapi_client.DefaultApi(api_client) + page_size = 'page_size_example' # str | (optional) + max_items = 'max_items_example' # str | (optional) + + try: + api_response = api_instance.intention_executions_get(page_size=page_size, max_items=max_items) + print("The response of DefaultApi->intention_executions_get:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling DefaultApi->intention_executions_get: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **page_size** | **str**| | [optional] + **max_items** | **str**| | [optional] + +### Return type + +[**Aicusapicob9jxGQ8zv1AS**](Aicusapicob9jxGQ8zv1AS.md) + +### Authorization + +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**400** | 400 response | - | +**500** | 500 response | - | +**200** | 200 response | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **intention_executions_options** +> intention_executions_options() + + + +### Example + + +```python +import openapi_client +from openapi_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# See 
configuration.py for a list of all supported configuration parameters. +configuration = openapi_client.Configuration( + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" +) + + +# Enter a context with an instance of the API client +with openapi_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = openapi_client.DefaultApi(api_client) + + try: + api_instance.intention_executions_options() + except Exception as e: + print("Exception when calling DefaultApi->intention_executions_options: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**204** | 204 response | * Access-Control-Allow-Origin -
* Access-Control-Allow-Methods -
* Access-Control-Allow-Credentials -
* Access-Control-Allow-Headers -
| + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **intention_executions_post** +> Aicusapico4rwMspzeBOe5 intention_executions_post(aicusapico_eoc_l_nul8cwxa) + + + +### Example + +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): + +```python +import openapi_client +from openapi_client.models.aicusapico4rw_mspze_boe5 import Aicusapico4rwMspzeBOe5 +from openapi_client.models.aicusapico_eoc_l_nul8cwxa import AicusapicoEOcLNul8cwxa +from openapi_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# See configuration.py for a list of all supported configuration parameters. +configuration = openapi_client.Configuration( + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' + +# Enter a context with an instance of the API client +with openapi_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = openapi_client.DefaultApi(api_client) + aicusapico_eoc_l_nul8cwxa = openapi_client.AicusapicoEOcLNul8cwxa() # AicusapicoEOcLNul8cwxa | + + try: + api_response = api_instance.intention_executions_post(aicusapico_eoc_l_nul8cwxa) + print("The response of DefaultApi->intention_executions_post:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling DefaultApi->intention_executions_post: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **aicusapico_eoc_l_nul8cwxa** | [**AicusapicoEOcLNul8cwxa**](AicusapicoEOcLNul8cwxa.md)| | + +### Return type + +[**Aicusapico4rwMspzeBOe5**](Aicusapico4rwMspzeBOe5.md) + +### Authorization + +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**400** | 400 response | - | +**500** | 500 response | - | +**200** | 200 response | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **intention_index_used_scan_options** +> intention_index_used_scan_options() + + + +### Example + + +```python +import openapi_client +from openapi_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod 
+# See configuration.py for a list of all supported configuration parameters. +configuration = openapi_client.Configuration( + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" +) + + +# Enter a context with an instance of the API client +with openapi_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = openapi_client.DefaultApi(api_client) + + try: + api_instance.intention_index_used_scan_options() + except Exception as e: + print("Exception when calling DefaultApi->intention_index_used_scan_options: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**204** | 204 response | * Access-Control-Allow-Origin -
* Access-Control-Allow-Methods -
* Access-Control-Allow-Credentials -
* Access-Control-Allow-Headers -
| + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **intention_index_used_scan_post** +> object intention_index_used_scan_post() + + + +### Example + +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): + +```python +import openapi_client +from openapi_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# See configuration.py for a list of all supported configuration parameters. +configuration = openapi_client.Configuration( + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] + +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' + +# Enter a context with an instance of the API client +with openapi_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = openapi_client.DefaultApi(api_client) + + try: + api_response = api_instance.intention_index_used_scan_post() + print("The response of DefaultApi->intention_index_used_scan_post:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling DefaultApi->intention_index_used_scan_post: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. 
+ +### Return type + +**object** + +### Authorization + +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**400** | 400 response | - | +**500** | 500 response | - | +**200** | 200 response | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **intention_options** +> intention_options() + + + +### Example + + +```python +import openapi_client +from openapi_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# See configuration.py for a list of all supported configuration parameters. +configuration = openapi_client.Configuration( + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" +) + + +# Enter a context with an instance of the API client +with openapi_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = openapi_client.DefaultApi(api_client) + + try: + api_instance.intention_options() + except Exception as e: + print("Exception when calling DefaultApi->intention_options: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**204** | 204 response | * Access-Control-Allow-Origin -
* Access-Control-Allow-Methods -
* Access-Control-Allow-Credentials -
* Access-Control-Allow-Headers -
| + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + # **knowledge_base_executions_delete** -> IntellapicoNbA0nyPxxk6q knowledge_base_executions_delete(intellapico_h4_a9yvm8c1p3) +> Aicusapicoqew7t5vTA2ak knowledge_base_executions_delete(aicusapicor1_kt5_c2m_lnkm) ### Example -* Api Key Authentication (intelliagentapiconstructApiAuthorizerFB94A0DF): +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): ```python import openapi_client -from openapi_client.models.intellapico_h4_a9yvm8c1p3 import IntellapicoH4A9yvm8c1p3 -from openapi_client.models.intellapico_nb_a0ny_pxxk6q import IntellapicoNbA0nyPxxk6q +from openapi_client.models.aicusapicoqew7t5v_ta2ak import Aicusapicoqew7t5vTA2ak +from openapi_client.models.aicusapicor1_kt5_c2m_lnkm import Aicusapicor1Kt5C2mLnkm from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -1058,20 +2372,20 @@ configuration = openapi_client.Configuration( # Examples for each auth method are provided below, use the example that # satisfies your auth use case. 
-# Configure API key authorization: intelliagentapiconstructApiAuthorizerFB94A0DF -configuration.api_key['intelliagentapiconstructApiAuthorizerFB94A0DF'] = os.environ["API_KEY"] +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed -# configuration.api_key_prefix['intelliagentapiconstructApiAuthorizerFB94A0DF'] = 'Bearer' +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' # Enter a context with an instance of the API client with openapi_client.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = openapi_client.DefaultApi(api_client) - intellapico_h4_a9yvm8c1p3 = openapi_client.IntellapicoH4A9yvm8c1p3() # IntellapicoH4A9yvm8c1p3 | + aicusapicor1_kt5_c2m_lnkm = openapi_client.Aicusapicor1Kt5C2mLnkm() # Aicusapicor1Kt5C2mLnkm | try: - api_response = api_instance.knowledge_base_executions_delete(intellapico_h4_a9yvm8c1p3) + api_response = api_instance.knowledge_base_executions_delete(aicusapicor1_kt5_c2m_lnkm) print("The response of DefaultApi->knowledge_base_executions_delete:\n") pprint(api_response) except Exception as e: @@ -1085,15 +2399,15 @@ with openapi_client.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **intellapico_h4_a9yvm8c1p3** | [**IntellapicoH4A9yvm8c1p3**](IntellapicoH4A9yvm8c1p3.md)| | + **aicusapicor1_kt5_c2m_lnkm** | [**Aicusapicor1Kt5C2mLnkm**](Aicusapicor1Kt5C2mLnkm.md)| | ### Return type -[**IntellapicoNbA0nyPxxk6q**](IntellapicoNbA0nyPxxk6q.md) +[**Aicusapicoqew7t5vTA2ak**](Aicusapicoqew7t5vTA2ak.md) ### Authorization -[intelliagentapiconstructApiAuthorizerFB94A0DF](../README.md#intelliagentapiconstructApiAuthorizerFB94A0DF) 
+[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) ### HTTP request headers @@ -1111,24 +2425,24 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **knowledge_base_executions_execution_id_get** -> IntellapicowXaFAEWeTgPt knowledge_base_executions_execution_id_get(execution_id) +> AicusapicoQjcoKzzZFI86 knowledge_base_executions_execution_id_get(execution_id) ### Example -* Api Key Authentication (intelliagentapiconstructApiAuthorizerFB94A0DF): +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): ```python import openapi_client -from openapi_client.models.intellapicow_xa_faewe_tg_pt import IntellapicowXaFAEWeTgPt +from openapi_client.models.aicusapico_qjco_kzz_zfi86 import AicusapicoQjcoKzzZFI86 from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -1136,11 +2450,11 @@ configuration = openapi_client.Configuration( # Examples for each auth method are provided below, use the example that # satisfies your auth use case. 
-# Configure API key authorization: intelliagentapiconstructApiAuthorizerFB94A0DF -configuration.api_key['intelliagentapiconstructApiAuthorizerFB94A0DF'] = os.environ["API_KEY"] +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed -# configuration.api_key_prefix['intelliagentapiconstructApiAuthorizerFB94A0DF'] = 'Bearer' +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' # Enter a context with an instance of the API client with openapi_client.ApiClient(configuration) as api_client: @@ -1167,11 +2481,11 @@ Name | Type | Description | Notes ### Return type -[**IntellapicowXaFAEWeTgPt**](IntellapicowXaFAEWeTgPt.md) +[**AicusapicoQjcoKzzZFI86**](AicusapicoQjcoKzzZFI86.md) ### Authorization -[intelliagentapiconstructApiAuthorizerFB94A0DF](../README.md#intelliagentapiconstructApiAuthorizerFB94A0DF) +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) ### HTTP request headers @@ -1201,10 +2515,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) @@ -1251,24 +2565,24 @@ No authorization required [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **knowledge_base_executions_get** -> IntellapicorVOJKT5wIzUC knowledge_base_executions_get(page_size=page_size, max_items=max_items) +> Aicusapico2TwvXbhsTncy knowledge_base_executions_get(page_size=page_size, max_items=max_items) ### Example -* Api Key Authentication (intelliagentapiconstructApiAuthorizerFB94A0DF): +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): ```python import openapi_client -from openapi_client.models.intellapicor_vojkt5w_iz_uc import IntellapicorVOJKT5wIzUC +from openapi_client.models.aicusapico2_twv_xbhs_tncy import Aicusapico2TwvXbhsTncy from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -1276,11 +2590,11 @@ configuration = openapi_client.Configuration( # Examples for each auth method are provided below, use the example that # satisfies your auth use case. 
-# Configure API key authorization: intelliagentapiconstructApiAuthorizerFB94A0DF -configuration.api_key['intelliagentapiconstructApiAuthorizerFB94A0DF'] = os.environ["API_KEY"] +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed -# configuration.api_key_prefix['intelliagentapiconstructApiAuthorizerFB94A0DF'] = 'Bearer' +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' # Enter a context with an instance of the API client with openapi_client.ApiClient(configuration) as api_client: @@ -1309,11 +2623,11 @@ Name | Type | Description | Notes ### Return type -[**IntellapicorVOJKT5wIzUC**](IntellapicorVOJKT5wIzUC.md) +[**Aicusapico2TwvXbhsTncy**](Aicusapico2TwvXbhsTncy.md) ### Authorization -[intelliagentapiconstructApiAuthorizerFB94A0DF](../README.md#intelliagentapiconstructApiAuthorizerFB94A0DF) +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) ### HTTP request headers @@ -1343,10 +2657,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) @@ -1389,24 +2703,24 @@ No authorization required [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **knowledge_base_executions_post** -> object knowledge_base_executions_post(intellapico_nk9o_lf1_k1uex) +> object knowledge_base_executions_post(aicusapicose_oar_xmrp_sns) ### Example -* Api Key Authentication (intelliagentapiconstructApiAuthorizerFB94A0DF): +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): ```python import openapi_client -from openapi_client.models.intellapico_nk9o_lf1_k1uex import IntellapicoNK9oLf1K1uex +from openapi_client.models.aicusapicose_oar_xmrp_sns import AicusapicoseOArXMRpSNs from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -1414,20 +2728,20 @@ configuration = openapi_client.Configuration( # Examples for each auth method are provided below, use the example that # satisfies your auth use case. 
-# Configure API key authorization: intelliagentapiconstructApiAuthorizerFB94A0DF -configuration.api_key['intelliagentapiconstructApiAuthorizerFB94A0DF'] = os.environ["API_KEY"] +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed -# configuration.api_key_prefix['intelliagentapiconstructApiAuthorizerFB94A0DF'] = 'Bearer' +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' # Enter a context with an instance of the API client with openapi_client.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = openapi_client.DefaultApi(api_client) - intellapico_nk9o_lf1_k1uex = openapi_client.IntellapicoNK9oLf1K1uex() # IntellapicoNK9oLf1K1uex | + aicusapicose_oar_xmrp_sns = openapi_client.AicusapicoseOArXMRpSNs() # AicusapicoseOArXMRpSNs | try: - api_response = api_instance.knowledge_base_executions_post(intellapico_nk9o_lf1_k1uex) + api_response = api_instance.knowledge_base_executions_post(aicusapicose_oar_xmrp_sns) print("The response of DefaultApi->knowledge_base_executions_post:\n") pprint(api_response) except Exception as e: @@ -1441,7 +2755,7 @@ with openapi_client.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **intellapico_nk9o_lf1_k1uex** | [**IntellapicoNK9oLf1K1uex**](IntellapicoNK9oLf1K1uex.md)| | + **aicusapicose_oar_xmrp_sns** | [**AicusapicoseOArXMRpSNs**](AicusapicoseOArXMRpSNs.md)| | ### Return type @@ -1449,7 +2763,7 @@ Name | Type | Description | Notes ### Authorization -[intelliagentapiconstructApiAuthorizerFB94A0DF](../README.md#intelliagentapiconstructApiAuthorizerFB94A0DF) 
+[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) ### HTTP request headers @@ -1479,10 +2793,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) @@ -1525,25 +2839,25 @@ No authorization required [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **knowledge_base_kb_presigned_url_post** -> IntellapicoXeXaUMjaXtPx knowledge_base_kb_presigned_url_post(intellapicormo5_lbzxs9_rb) +> Aicusapicoh5w3FRwxBjhG knowledge_base_kb_presigned_url_post(aicusapico51_raf_cay_oxi_z) ### Example -* Api Key Authentication (intelliagentapiconstructApiAuthorizerFB94A0DF): +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): ```python import openapi_client -from openapi_client.models.intellapico_xe_xa_u_mja_xt_px import IntellapicoXeXaUMjaXtPx -from openapi_client.models.intellapicormo5_lbzxs9_rb import Intellapicormo5LBZXS9Rb +from openapi_client.models.aicusapico51_raf_cay_oxi_z import Aicusapico51RafCAYOxiZ +from openapi_client.models.aicusapicoh5w3_f_rwx_bjh_g import Aicusapicoh5w3FRwxBjhG from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to 
https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -1551,20 +2865,20 @@ configuration = openapi_client.Configuration( # Examples for each auth method are provided below, use the example that # satisfies your auth use case. -# Configure API key authorization: intelliagentapiconstructApiAuthorizerFB94A0DF -configuration.api_key['intelliagentapiconstructApiAuthorizerFB94A0DF'] = os.environ["API_KEY"] +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed -# configuration.api_key_prefix['intelliagentapiconstructApiAuthorizerFB94A0DF'] = 'Bearer' +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' # Enter a context with an instance of the API client with openapi_client.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = openapi_client.DefaultApi(api_client) - intellapicormo5_lbzxs9_rb = openapi_client.Intellapicormo5LBZXS9Rb() # Intellapicormo5LBZXS9Rb | + aicusapico51_raf_cay_oxi_z = openapi_client.Aicusapico51RafCAYOxiZ() # Aicusapico51RafCAYOxiZ | try: - api_response = api_instance.knowledge_base_kb_presigned_url_post(intellapicormo5_lbzxs9_rb) + api_response = api_instance.knowledge_base_kb_presigned_url_post(aicusapico51_raf_cay_oxi_z) print("The response of DefaultApi->knowledge_base_kb_presigned_url_post:\n") pprint(api_response) except Exception as e: @@ -1578,15 +2892,15 @@ with openapi_client.ApiClient(configuration) as 
api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **intellapicormo5_lbzxs9_rb** | [**Intellapicormo5LBZXS9Rb**](Intellapicormo5LBZXS9Rb.md)| | + **aicusapico51_raf_cay_oxi_z** | [**Aicusapico51RafCAYOxiZ**](Aicusapico51RafCAYOxiZ.md)| | ### Return type -[**IntellapicoXeXaUMjaXtPx**](IntellapicoXeXaUMjaXtPx.md) +[**Aicusapicoh5w3FRwxBjhG**](Aicusapicoh5w3FRwxBjhG.md) ### Authorization -[intelliagentapiconstructApiAuthorizerFB94A0DF](../README.md#intelliagentapiconstructApiAuthorizerFB94A0DF) +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) ### HTTP request headers @@ -1616,10 +2930,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) @@ -1674,10 +2988,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) @@ -1726,17 +3040,17 @@ No authorization required ### Example -* Api Key Authentication (intelliagentapiconstructApiAuthorizerFB94A0DF): +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): ```python import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -1744,11 +3058,11 @@ configuration = openapi_client.Configuration( # Examples for each auth method are provided below, use the example that # satisfies your auth use case. -# Configure API key authorization: intelliagentapiconstructApiAuthorizerFB94A0DF -configuration.api_key['intelliagentapiconstructApiAuthorizerFB94A0DF'] = os.environ["API_KEY"] +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed -# configuration.api_key_prefix['intelliagentapiconstructApiAuthorizerFB94A0DF'] = 'Bearer' +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' # Enter a context with an instance of the API client with openapi_client.ApiClient(configuration) as api_client: @@ -1775,7 +3089,7 @@ This endpoint does not need any parameter. ### Authorization -[intelliagentapiconstructApiAuthorizerFB94A0DF](../README.md#intelliagentapiconstructApiAuthorizerFB94A0DF) +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) ### HTTP request headers @@ -1799,17 +3113,17 @@ This endpoint does not need any parameter. ### Example -* Api Key Authentication (intelliagentapiconstructApiAuthorizerFB94A0DF): +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): ```python import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -1817,11 +3131,11 @@ configuration = openapi_client.Configuration( # Examples for each auth method are provided below, use the example that # satisfies your auth use case. 
-# Configure API key authorization: intelliagentapiconstructApiAuthorizerFB94A0DF -configuration.api_key['intelliagentapiconstructApiAuthorizerFB94A0DF'] = os.environ["API_KEY"] +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed -# configuration.api_key_prefix['intelliagentapiconstructApiAuthorizerFB94A0DF'] = 'Bearer' +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' # Enter a context with an instance of the API client with openapi_client.ApiClient(configuration) as api_client: @@ -1848,7 +3162,7 @@ This endpoint does not need any parameter. ### Authorization -[intelliagentapiconstructApiAuthorizerFB94A0DF](../README.md#intelliagentapiconstructApiAuthorizerFB94A0DF) +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) ### HTTP request headers @@ -1878,10 +3192,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) @@ -1936,10 +3250,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) @@ -1988,17 +3302,17 @@ No authorization required ### Example -* Api Key Authentication (intelliagentapiconstructApiAuthorizerFB94A0DF): +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): ```python import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -2006,11 +3320,11 @@ configuration = openapi_client.Configuration( # Examples for each auth method are provided below, use the example that # satisfies your auth use case. 
-# Configure API key authorization: intelliagentapiconstructApiAuthorizerFB94A0DF -configuration.api_key['intelliagentapiconstructApiAuthorizerFB94A0DF'] = os.environ["API_KEY"] +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed -# configuration.api_key_prefix['intelliagentapiconstructApiAuthorizerFB94A0DF'] = 'Bearer' +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' # Enter a context with an instance of the API client with openapi_client.ApiClient(configuration) as api_client: @@ -2037,7 +3351,7 @@ This endpoint does not need any parameter. ### Authorization -[intelliagentapiconstructApiAuthorizerFB94A0DF](../README.md#intelliagentapiconstructApiAuthorizerFB94A0DF) +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) ### HTTP request headers @@ -2067,10 +3381,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) @@ -2119,17 +3433,17 @@ No authorization required ### Example -* Api Key Authentication (intelliagentapiconstructApiAuthorizerFB94A0DF): +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): ```python import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -2137,11 +3451,11 @@ configuration = openapi_client.Configuration( # Examples for each auth method are provided below, use the example that # satisfies your auth use case. -# Configure API key authorization: intelliagentapiconstructApiAuthorizerFB94A0DF -configuration.api_key['intelliagentapiconstructApiAuthorizerFB94A0DF'] = os.environ["API_KEY"] +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed -# configuration.api_key_prefix['intelliagentapiconstructApiAuthorizerFB94A0DF'] = 'Bearer' +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' # Enter a context with an instance of the API client with openapi_client.ApiClient(configuration) as api_client: @@ -2168,7 +3482,7 @@ This endpoint does not need any parameter. ### Authorization -[intelliagentapiconstructApiAuthorizerFB94A0DF](../README.md#intelliagentapiconstructApiAuthorizerFB94A0DF) +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) ### HTTP request headers @@ -2192,17 +3506,17 @@ This endpoint does not need any parameter. ### Example -* Api Key Authentication (intelliagentapiconstructApiAuthorizerFB94A0DF): +* Api Key Authentication (aicustomerserviceapiconstructApiAuthorizerEB0B49FC): ```python import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -2210,11 +3524,11 @@ configuration = openapi_client.Configuration( # Examples for each auth method are provided below, use the example that # satisfies your auth use case. 
-# Configure API key authorization: intelliagentapiconstructApiAuthorizerFB94A0DF -configuration.api_key['intelliagentapiconstructApiAuthorizerFB94A0DF'] = os.environ["API_KEY"] +# Configure API key authorization: aicustomerserviceapiconstructApiAuthorizerEB0B49FC +configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os.environ["API_KEY"] # Uncomment below to setup prefix (e.g. Bearer) for API key, if needed -# configuration.api_key_prefix['intelliagentapiconstructApiAuthorizerFB94A0DF'] = 'Bearer' +# configuration.api_key_prefix['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = 'Bearer' # Enter a context with an instance of the API client with openapi_client.ApiClient(configuration) as api_client: @@ -2241,7 +3555,7 @@ This endpoint does not need any parameter. ### Authorization -[intelliagentapiconstructApiAuthorizerFB94A0DF](../README.md#intelliagentapiconstructApiAuthorizerFB94A0DF) +[aicustomerserviceapiconstructApiAuthorizerEB0B49FC](../README.md#aicustomerserviceapiconstructApiAuthorizerEB0B49FC) ### HTTP request headers @@ -2271,10 +3585,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) @@ -2329,10 +3643,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" + host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" ) diff --git a/api_test/biz_logic/rest_api/docs/IntellapicoH4A9yvm8c1p3.md b/api_test/biz_logic/rest_api/docs/IntellapicoH4A9yvm8c1p3.md deleted file mode 100644 index fd222fc16..000000000 --- a/api_test/biz_logic/rest_api/docs/IntellapicoH4A9yvm8c1p3.md +++ /dev/null @@ -1,29 +0,0 @@ -# IntellapicoH4A9yvm8c1p3 - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**execution_id** | **List[str]** | | - -## Example - -```python -from openapi_client.models.intellapico_h4_a9yvm8c1p3 import IntellapicoH4A9yvm8c1p3 - -# TODO update the JSON string below -json = "{}" -# create an instance of IntellapicoH4A9yvm8c1p3 from a JSON string -intellapico_h4_a9yvm8c1p3_instance = IntellapicoH4A9yvm8c1p3.from_json(json) -# print the JSON string representation of the object -print(IntellapicoH4A9yvm8c1p3.to_json()) - -# convert the object into a dict -intellapico_h4_a9yvm8c1p3_dict = intellapico_h4_a9yvm8c1p3_instance.to_dict() -# create an instance of IntellapicoH4A9yvm8c1p3 from a dict -intellapico_h4_a9yvm8c1p3_from_dict = IntellapicoH4A9yvm8c1p3.from_dict(intellapico_h4_a9yvm8c1p3_dict) -``` -[[Back to Model 
list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/api_test/biz_logic/rest_api/docs/IntellapicoNbA0nyPxxk6q.md b/api_test/biz_logic/rest_api/docs/IntellapicoNbA0nyPxxk6q.md deleted file mode 100644 index ed167a8fe..000000000 --- a/api_test/biz_logic/rest_api/docs/IntellapicoNbA0nyPxxk6q.md +++ /dev/null @@ -1,30 +0,0 @@ -# IntellapicoNbA0nyPxxk6q - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**data** | **List[str]** | | [optional] -**message** | **str** | | [optional] - -## Example - -```python -from openapi_client.models.intellapico_nb_a0ny_pxxk6q import IntellapicoNbA0nyPxxk6q - -# TODO update the JSON string below -json = "{}" -# create an instance of IntellapicoNbA0nyPxxk6q from a JSON string -intellapico_nb_a0ny_pxxk6q_instance = IntellapicoNbA0nyPxxk6q.from_json(json) -# print the JSON string representation of the object -print(IntellapicoNbA0nyPxxk6q.to_json()) - -# convert the object into a dict -intellapico_nb_a0ny_pxxk6q_dict = intellapico_nb_a0ny_pxxk6q_instance.to_dict() -# create an instance of IntellapicoNbA0nyPxxk6q from a dict -intellapico_nb_a0ny_pxxk6q_from_dict = IntellapicoNbA0nyPxxk6q.from_dict(intellapico_nb_a0ny_pxxk6q_dict) -``` -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/api_test/biz_logic/rest_api/docs/IntellapicorVOJKT5wIzUC.md b/api_test/biz_logic/rest_api/docs/IntellapicorVOJKT5wIzUC.md deleted file mode 100644 index c91a9457a..000000000 --- a/api_test/biz_logic/rest_api/docs/IntellapicorVOJKT5wIzUC.md +++ /dev/null @@ -1,31 +0,0 @@ -# IntellapicorVOJKT5wIzUC - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**config** | 
[**IntellapicorVOJKT5wIzUCConfig**](IntellapicorVOJKT5wIzUCConfig.md) | | [optional] -**items** | [**List[IntellapicorVOJKT5wIzUCItemsInner]**](IntellapicorVOJKT5wIzUCItemsInner.md) | | [optional] -**count** | **int** | | [optional] - -## Example - -```python -from openapi_client.models.intellapicor_vojkt5w_iz_uc import IntellapicorVOJKT5wIzUC - -# TODO update the JSON string below -json = "{}" -# create an instance of IntellapicorVOJKT5wIzUC from a JSON string -intellapicor_vojkt5w_iz_uc_instance = IntellapicorVOJKT5wIzUC.from_json(json) -# print the JSON string representation of the object -print(IntellapicorVOJKT5wIzUC.to_json()) - -# convert the object into a dict -intellapicor_vojkt5w_iz_uc_dict = intellapicor_vojkt5w_iz_uc_instance.to_dict() -# create an instance of IntellapicorVOJKT5wIzUC from a dict -intellapicor_vojkt5w_iz_uc_from_dict = IntellapicorVOJKT5wIzUC.from_dict(intellapicor_vojkt5w_iz_uc_dict) -``` -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/api_test/biz_logic/rest_api/docs/IntellapicorVOJKT5wIzUCConfig.md b/api_test/biz_logic/rest_api/docs/IntellapicorVOJKT5wIzUCConfig.md deleted file mode 100644 index b95d2aa1d..000000000 --- a/api_test/biz_logic/rest_api/docs/IntellapicorVOJKT5wIzUCConfig.md +++ /dev/null @@ -1,30 +0,0 @@ -# IntellapicorVOJKT5wIzUCConfig - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**page_size** | **int** | | [optional] -**max_items** | **int** | | [optional] - -## Example - -```python -from openapi_client.models.intellapicor_vojkt5w_iz_uc_config import IntellapicorVOJKT5wIzUCConfig - -# TODO update the JSON string below -json = "{}" -# create an instance of IntellapicorVOJKT5wIzUCConfig from a JSON string -intellapicor_vojkt5w_iz_uc_config_instance = IntellapicorVOJKT5wIzUCConfig.from_json(json) -# print the JSON string 
representation of the object -print(IntellapicorVOJKT5wIzUCConfig.to_json()) - -# convert the object into a dict -intellapicor_vojkt5w_iz_uc_config_dict = intellapicor_vojkt5w_iz_uc_config_instance.to_dict() -# create an instance of IntellapicorVOJKT5wIzUCConfig from a dict -intellapicor_vojkt5w_iz_uc_config_from_dict = IntellapicorVOJKT5wIzUCConfig.from_dict(intellapicor_vojkt5w_iz_uc_config_dict) -``` -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/api_test/biz_logic/rest_api/docs/Intellapicormo5LBZXS9Rb.md b/api_test/biz_logic/rest_api/docs/Intellapicormo5LBZXS9Rb.md deleted file mode 100644 index e386838a2..000000000 --- a/api_test/biz_logic/rest_api/docs/Intellapicormo5LBZXS9Rb.md +++ /dev/null @@ -1,30 +0,0 @@ -# Intellapicormo5LBZXS9Rb - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**content_type** | **str** | | -**file_name** | **str** | | - -## Example - -```python -from openapi_client.models.intellapicormo5_lbzxs9_rb import Intellapicormo5LBZXS9Rb - -# TODO update the JSON string below -json = "{}" -# create an instance of Intellapicormo5LBZXS9Rb from a JSON string -intellapicormo5_lbzxs9_rb_instance = Intellapicormo5LBZXS9Rb.from_json(json) -# print the JSON string representation of the object -print(Intellapicormo5LBZXS9Rb.to_json()) - -# convert the object into a dict -intellapicormo5_lbzxs9_rb_dict = intellapicormo5_lbzxs9_rb_instance.to_dict() -# create an instance of Intellapicormo5LBZXS9Rb from a dict -intellapicormo5_lbzxs9_rb_from_dict = Intellapicormo5LBZXS9Rb.from_dict(intellapicormo5_lbzxs9_rb_dict) -``` -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/api_test/biz_logic/rest_api/docs/IntellapicowXaFAEWeTgPt.md 
b/api_test/biz_logic/rest_api/docs/IntellapicowXaFAEWeTgPt.md deleted file mode 100644 index 5e24e0573..000000000 --- a/api_test/biz_logic/rest_api/docs/IntellapicowXaFAEWeTgPt.md +++ /dev/null @@ -1,30 +0,0 @@ -# IntellapicowXaFAEWeTgPt - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**items** | [**List[IntellapicowXaFAEWeTgPtItemsInner]**](IntellapicowXaFAEWeTgPtItemsInner.md) | | [optional] -**count** | **int** | | [optional] - -## Example - -```python -from openapi_client.models.intellapicow_xa_faewe_tg_pt import IntellapicowXaFAEWeTgPt - -# TODO update the JSON string below -json = "{}" -# create an instance of IntellapicowXaFAEWeTgPt from a JSON string -intellapicow_xa_faewe_tg_pt_instance = IntellapicowXaFAEWeTgPt.from_json(json) -# print the JSON string representation of the object -print(IntellapicowXaFAEWeTgPt.to_json()) - -# convert the object into a dict -intellapicow_xa_faewe_tg_pt_dict = intellapicow_xa_faewe_tg_pt_instance.to_dict() -# create an instance of IntellapicowXaFAEWeTgPt from a dict -intellapicow_xa_faewe_tg_pt_from_dict = IntellapicowXaFAEWeTgPt.from_dict(intellapicow_xa_faewe_tg_pt_dict) -``` -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/api_test/biz_logic/rest_api/docs/IntellapicowXaFAEWeTgPtItemsInner.md b/api_test/biz_logic/rest_api/docs/IntellapicowXaFAEWeTgPtItemsInner.md deleted file mode 100644 index f0cec3942..000000000 --- a/api_test/biz_logic/rest_api/docs/IntellapicowXaFAEWeTgPtItemsInner.md +++ /dev/null @@ -1,34 +0,0 @@ -# IntellapicowXaFAEWeTgPtItemsInner - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**execution_id** | **str** | | -**s3_path** | **str** | | -**create_time** | **str** | | -**s3_prefix** | **str** | | -**s3_bucket** | **str** | | 
-**status** | **str** | | - -## Example - -```python -from openapi_client.models.intellapicow_xa_faewe_tg_pt_items_inner import IntellapicowXaFAEWeTgPtItemsInner - -# TODO update the JSON string below -json = "{}" -# create an instance of IntellapicowXaFAEWeTgPtItemsInner from a JSON string -intellapicow_xa_faewe_tg_pt_items_inner_instance = IntellapicowXaFAEWeTgPtItemsInner.from_json(json) -# print the JSON string representation of the object -print(IntellapicowXaFAEWeTgPtItemsInner.to_json()) - -# convert the object into a dict -intellapicow_xa_faewe_tg_pt_items_inner_dict = intellapicow_xa_faewe_tg_pt_items_inner_instance.to_dict() -# create an instance of IntellapicowXaFAEWeTgPtItemsInner from a dict -intellapicow_xa_faewe_tg_pt_items_inner_from_dict = IntellapicowXaFAEWeTgPtItemsInner.from_dict(intellapicow_xa_faewe_tg_pt_items_inner_dict) -``` -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/api_test/biz_logic/rest_api/openapi_client/__init__.py b/api_test/biz_logic/rest_api/openapi_client/__init__.py index 0e3d7de7f..54840b84e 100644 --- a/api_test/biz_logic/rest_api/openapi_client/__init__.py +++ b/api_test/biz_logic/rest_api/openapi_client/__init__.py @@ -3,11 +3,11 @@ # flake8: noqa """ - Intelli-Agent-RESTful-API + aics-api - Intelli-Agent RESTful API + AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-07-29T06:23:46Z + The version of the OpenAPI document: 2024-10-21T08:32:58Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
@@ -15,6 +15,7 @@ __version__ = "1.0.0" + import sys import os # 获取 openapi_client 目录的路径 @@ -36,13 +37,29 @@ from openapi_client.exceptions import ApiException # import models into sdk package -from openapi_client.models.intellapico_h4_a9yvm8c1p3 import IntellapicoH4A9yvm8c1p3 -from openapi_client.models.intellapico_nk9o_lf1_k1uex import IntellapicoNK9oLf1K1uex -from openapi_client.models.intellapico_nb_a0ny_pxxk6q import IntellapicoNbA0nyPxxk6q -from openapi_client.models.intellapico_xe_xa_u_mja_xt_px import IntellapicoXeXaUMjaXtPx -from openapi_client.models.intellapicor_vojkt5w_iz_uc import IntellapicorVOJKT5wIzUC -from openapi_client.models.intellapicor_vojkt5w_iz_uc_config import IntellapicorVOJKT5wIzUCConfig -from openapi_client.models.intellapicor_vojkt5w_iz_uc_items_inner import IntellapicorVOJKT5wIzUCItemsInner -from openapi_client.models.intellapicormo5_lbzxs9_rb import Intellapicormo5LBZXS9Rb -from openapi_client.models.intellapicow_xa_faewe_tg_pt import IntellapicowXaFAEWeTgPt -from openapi_client.models.intellapicow_xa_faewe_tg_pt_items_inner import IntellapicowXaFAEWeTgPtItemsInner +from openapi_client.models.aicusapico2_twv_xbhs_tncy import Aicusapico2TwvXbhsTncy +from openapi_client.models.aicusapico2_twv_xbhs_tncy_config import Aicusapico2TwvXbhsTncyConfig +from openapi_client.models.aicusapico2_twv_xbhs_tncy_items_inner import Aicusapico2TwvXbhsTncyItemsInner +from openapi_client.models.aicusapico35klz_y80ik_ph import Aicusapico35klzY80ikPh +from openapi_client.models.aicusapico35klz_y80ik_ph_items_inner import Aicusapico35klzY80ikPhItemsInner +from openapi_client.models.aicusapico35klz_y80ik_ph_items_inner_qa_list_inner import Aicusapico35klzY80ikPhItemsInnerQAListInner +from openapi_client.models.aicusapico4rw_mspze_boe5 import Aicusapico4rwMspzeBOe5 +from openapi_client.models.aicusapico4rw_mspze_boe5_input_payload import Aicusapico4rwMspzeBOe5InputPayload +from openapi_client.models.aicusapico51_raf_cay_oxi_z import Aicusapico51RafCAYOxiZ 
+from openapi_client.models.aicusapico_cyd129_m65y_kv import AicusapicoCyd129M65yKV +from openapi_client.models.aicusapico_eoc_l_nul8cwxa import AicusapicoEOcLNul8cwxa +from openapi_client.models.aicusapico_qjco_kzz_zfi86 import AicusapicoQjcoKzzZFI86 +from openapi_client.models.aicusapico_qjco_kzz_zfi86_items_inner import AicusapicoQjcoKzzZFI86ItemsInner +from openapi_client.models.aicusapico_uy1_ybxi_wj5_aq import AicusapicoUy1YBXiWJ5Aq +from openapi_client.models.aicusapicob9jx_gq8zv1_as import Aicusapicob9jxGQ8zv1AS +from openapi_client.models.aicusapicob9jx_gq8zv1_as_items_inner import Aicusapicob9jxGQ8zv1ASItemsInner +from openapi_client.models.aicusapicob_mn2p_lk9_av_e8 import AicusapicobMN2pLK9AvE8 +from openapi_client.models.aicusapicob_mn2p_lk9_av_e8_index import AicusapicobMN2pLK9AvE8Index +from openapi_client.models.aicusapicoh5w3_f_rwx_bjh_g import Aicusapicoh5w3FRwxBjhG +from openapi_client.models.aicusapicoh_qb_fv37cvt_qs import AicusapicohQbFv37cvtQS +from openapi_client.models.aicusapicoh_qb_fv37cvt_qs_index_ids import AicusapicohQbFv37cvtQSIndexIds +from openapi_client.models.aicusapicoi_x_uam8_n8_dh8l import AicusapicoiXUam8N8Dh8l +from openapi_client.models.aicusapicoi_x_uam8_n8_dh8l_items_inner import AicusapicoiXUam8N8Dh8lItemsInner +from openapi_client.models.aicusapicoqew7t5v_ta2ak import Aicusapicoqew7t5vTA2ak +from openapi_client.models.aicusapicor1_kt5_c2m_lnkm import Aicusapicor1Kt5C2mLnkm +from openapi_client.models.aicusapicose_oar_xmrp_sns import AicusapicoseOArXMRpSNs diff --git a/api_test/biz_logic/rest_api/openapi_client/api/default_api.py b/api_test/biz_logic/rest_api/openapi_client/api/default_api.py index 86ae0b555..59b4c90bb 100644 --- a/api_test/biz_logic/rest_api/openapi_client/api/default_api.py +++ b/api_test/biz_logic/rest_api/openapi_client/api/default_api.py @@ -1,30 +1,42 @@ # coding: utf-8 """ - Intelli-Agent-RESTful-API + aics-api - Intelli-Agent RESTful API + AI-Customer-Service - Core API - The version of the 
OpenAPI document: 2024-07-29T06:23:46Z + The version of the OpenAPI document: 2024-10-21T08:32:58Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. """ # noqa: E501 import warnings -from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +# from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt from typing import Any, Dict, List, Optional, Tuple, Union + +from pydantic import StrictFloat, StrictStr, StrictInt, validate_call, BaseModel, Field +# from pydantic.aliases import Field from typing_extensions import Annotated -from pydantic import StrictStr +# from pydantic import StrictStr from typing import Any, Dict, Optional -from openapi_client.models.intellapico_h4_a9yvm8c1p3 import IntellapicoH4A9yvm8c1p3 -from openapi_client.models.intellapico_nk9o_lf1_k1uex import IntellapicoNK9oLf1K1uex -from openapi_client.models.intellapico_nb_a0ny_pxxk6q import IntellapicoNbA0nyPxxk6q -from openapi_client.models.intellapico_xe_xa_u_mja_xt_px import IntellapicoXeXaUMjaXtPx -from openapi_client.models.intellapicor_vojkt5w_iz_uc import IntellapicorVOJKT5wIzUC -from openapi_client.models.intellapicormo5_lbzxs9_rb import Intellapicormo5LBZXS9Rb -from openapi_client.models.intellapicow_xa_faewe_tg_pt import IntellapicowXaFAEWeTgPt +from openapi_client.models.aicusapico2_twv_xbhs_tncy import Aicusapico2TwvXbhsTncy +from openapi_client.models.aicusapico35klz_y80ik_ph import Aicusapico35klzY80ikPh +from openapi_client.models.aicusapico4rw_mspze_boe5 import Aicusapico4rwMspzeBOe5 +from openapi_client.models.aicusapico51_raf_cay_oxi_z import Aicusapico51RafCAYOxiZ +from openapi_client.models.aicusapico_cyd129_m65y_kv import AicusapicoCyd129M65yKV +from openapi_client.models.aicusapico_eoc_l_nul8cwxa import AicusapicoEOcLNul8cwxa +from openapi_client.models.aicusapico_qjco_kzz_zfi86 import AicusapicoQjcoKzzZFI86 +from openapi_client.models.aicusapico_uy1_ybxi_wj5_aq import AicusapicoUy1YBXiWJ5Aq 
+from openapi_client.models.aicusapicob9jx_gq8zv1_as import Aicusapicob9jxGQ8zv1AS +from openapi_client.models.aicusapicob_mn2p_lk9_av_e8 import AicusapicobMN2pLK9AvE8 +from openapi_client.models.aicusapicoh5w3_f_rwx_bjh_g import Aicusapicoh5w3FRwxBjhG +from openapi_client.models.aicusapicoh_qb_fv37cvt_qs import AicusapicohQbFv37cvtQS +from openapi_client.models.aicusapicoi_x_uam8_n8_dh8l import AicusapicoiXUam8N8Dh8l +from openapi_client.models.aicusapicoqew7t5v_ta2ak import Aicusapicoqew7t5vTA2ak +from openapi_client.models.aicusapicor1_kt5_c2m_lnkm import Aicusapicor1Kt5C2mLnkm +from openapi_client.models.aicusapicose_oar_xmrp_sns import AicusapicoseOArXMRpSNs from openapi_client.api_client import ApiClient, RequestSerialized from openapi_client.api_response import ApiResponse @@ -269,7 +281,7 @@ def _aos_get_serialize( # authentication setting _auth_settings: List[str] = [ - 'intelliagentapiconstructApiAuthorizerFB94A0DF' + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' ] return self.api_client.param_serialize( @@ -748,7 +760,7 @@ def _aos_post_serialize( # authentication setting _auth_settings: List[str] = [ - 'intelliagentapiconstructApiAuthorizerFB94A0DF' + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' ] return self.api_client.param_serialize( @@ -994,7 +1006,7 @@ def _chat_history_messages_get_serialize( # authentication setting _auth_settings: List[str] = [ - 'intelliagentapiconstructApiAuthorizerFB94A0DF' + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' ] return self.api_client.param_serialize( @@ -1706,7 +1718,7 @@ def _chat_history_post_serialize( # authentication setting _auth_settings: List[str] = [ - 'intelliagentapiconstructApiAuthorizerFB94A0DF' + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' ] return self.api_client.param_serialize( @@ -1952,7 +1964,7 @@ def _chat_history_sessions_get_serialize( # authentication setting _auth_settings: List[str] = [ - 'intelliagentapiconstructApiAuthorizerFB94A0DF' + 
'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' ] return self.api_client.param_serialize( @@ -2209,6 +2221,8 @@ def _chat_history_sessions_options_serialize( @validate_call def chatbot_management_chatbots_get( self, + page_size: Optional[StrictStr] = None, + max_items: Optional[StrictStr] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -2221,10 +2235,14 @@ def chatbot_management_chatbots_get( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> object: + ) -> AicusapicoiXUam8N8Dh8l: """chatbot_management_chatbots_get + :param page_size: + :type page_size: str + :param max_items: + :type max_items: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -2248,6 +2266,8 @@ def chatbot_management_chatbots_get( """ # noqa: E501 _param = self._chatbot_management_chatbots_get_serialize( + page_size=page_size, + max_items=max_items, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -2257,7 +2277,7 @@ def chatbot_management_chatbots_get( _response_types_map: Dict[str, Optional[str]] = { '400': "object", '500': "object", - '200': "object", + '200': "AicusapicoiXUam8N8Dh8l", } response_data = self.api_client.call_api( *_param, @@ -2273,6 +2293,8 @@ def chatbot_management_chatbots_get( @validate_call def chatbot_management_chatbots_get_with_http_info( self, + page_size: Optional[StrictStr] = None, + max_items: Optional[StrictStr] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -2285,10 +2307,14 @@ def chatbot_management_chatbots_get_with_http_info( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[object]: + ) -> ApiResponse[AicusapicoiXUam8N8Dh8l]: 
"""chatbot_management_chatbots_get + :param page_size: + :type page_size: str + :param max_items: + :type max_items: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -2312,6 +2338,8 @@ def chatbot_management_chatbots_get_with_http_info( """ # noqa: E501 _param = self._chatbot_management_chatbots_get_serialize( + page_size=page_size, + max_items=max_items, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -2321,7 +2349,7 @@ def chatbot_management_chatbots_get_with_http_info( _response_types_map: Dict[str, Optional[str]] = { '400': "object", '500': "object", - '200': "object", + '200': "AicusapicoiXUam8N8Dh8l", } response_data = self.api_client.call_api( *_param, @@ -2337,6 +2365,8 @@ def chatbot_management_chatbots_get_with_http_info( @validate_call def chatbot_management_chatbots_get_without_preload_content( self, + page_size: Optional[StrictStr] = None, + max_items: Optional[StrictStr] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -2353,6 +2383,10 @@ def chatbot_management_chatbots_get_without_preload_content( """chatbot_management_chatbots_get + :param page_size: + :type page_size: str + :param max_items: + :type max_items: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -2376,6 +2410,8 @@ def chatbot_management_chatbots_get_without_preload_content( """ # noqa: E501 _param = self._chatbot_management_chatbots_get_serialize( + page_size=page_size, + max_items=max_items, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -2385,7 +2421,7 @@ def chatbot_management_chatbots_get_without_preload_content( _response_types_map: Dict[str, Optional[str]] = { '400': "object", '500': "object", - '200': "object", + '200': "AicusapicoiXUam8N8Dh8l", } response_data = self.api_client.call_api( *_param, @@ -2396,6 +2432,8 @@ def chatbot_management_chatbots_get_without_preload_content( def _chatbot_management_chatbots_get_serialize( self, + page_size, + max_items, _request_auth, _content_type, _headers, @@ -2416,6 +2454,14 @@ def _chatbot_management_chatbots_get_serialize( # process the path parameters # process the query parameters + if page_size is not None: + + _query_params.append(('page_size', page_size)) + + if max_items is not None: + + _query_params.append(('max_items', max_items)) + # process the header parameters # process the form parameters # process the body parameter @@ -2431,7 +2477,7 @@ def _chatbot_management_chatbots_get_serialize( # authentication setting _auth_settings: List[str] = [ - 'intelliagentapiconstructApiAuthorizerFB94A0DF' + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' ] return self.api_client.param_serialize( @@ -2688,6 +2734,7 @@ def _chatbot_management_chatbots_options_serialize( @validate_call def chatbot_management_chatbots_post( self, + aicusapicob_mn2p_lk9_av_e8: AicusapicobMN2pLK9AvE8, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -2700,10 +2747,12 @@ def chatbot_management_chatbots_post( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> object: + ) -> AicusapicohQbFv37cvtQS: """chatbot_management_chatbots_post + 
:param aicusapicob_mn2p_lk9_av_e8: (required) + :type aicusapicob_mn2p_lk9_av_e8: AicusapicobMN2pLK9AvE8 :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -2727,6 +2776,7 @@ def chatbot_management_chatbots_post( """ # noqa: E501 _param = self._chatbot_management_chatbots_post_serialize( + aicusapicob_mn2p_lk9_av_e8=aicusapicob_mn2p_lk9_av_e8, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -2736,7 +2786,7 @@ def chatbot_management_chatbots_post( _response_types_map: Dict[str, Optional[str]] = { '400': "object", '500': "object", - '200': "object", + '200': "AicusapicohQbFv37cvtQS", } response_data = self.api_client.call_api( *_param, @@ -2752,6 +2802,7 @@ def chatbot_management_chatbots_post( @validate_call def chatbot_management_chatbots_post_with_http_info( self, + aicusapicob_mn2p_lk9_av_e8: AicusapicobMN2pLK9AvE8, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -2764,10 +2815,12 @@ def chatbot_management_chatbots_post_with_http_info( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[object]: + ) -> ApiResponse[AicusapicohQbFv37cvtQS]: """chatbot_management_chatbots_post + :param aicusapicob_mn2p_lk9_av_e8: (required) + :type aicusapicob_mn2p_lk9_av_e8: AicusapicobMN2pLK9AvE8 :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -2791,6 +2844,7 @@ def chatbot_management_chatbots_post_with_http_info( """ # noqa: E501 _param = self._chatbot_management_chatbots_post_serialize( + aicusapicob_mn2p_lk9_av_e8=aicusapicob_mn2p_lk9_av_e8, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -2800,7 +2854,7 @@ def chatbot_management_chatbots_post_with_http_info( _response_types_map: Dict[str, Optional[str]] = { '400': "object", '500': "object", - '200': "object", + '200': "AicusapicohQbFv37cvtQS", } response_data = self.api_client.call_api( *_param, @@ -2816,6 +2870,7 @@ def chatbot_management_chatbots_post_with_http_info( @validate_call def chatbot_management_chatbots_post_without_preload_content( self, + aicusapicob_mn2p_lk9_av_e8: AicusapicobMN2pLK9AvE8, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -2832,6 +2887,8 @@ def chatbot_management_chatbots_post_without_preload_content( """chatbot_management_chatbots_post + :param aicusapicob_mn2p_lk9_av_e8: (required) + :type aicusapicob_mn2p_lk9_av_e8: AicusapicobMN2pLK9AvE8 :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -2855,6 +2912,7 @@ def chatbot_management_chatbots_post_without_preload_content( """ # noqa: E501 _param = self._chatbot_management_chatbots_post_serialize( + aicusapicob_mn2p_lk9_av_e8=aicusapicob_mn2p_lk9_av_e8, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -2864,7 +2922,7 @@ def chatbot_management_chatbots_post_without_preload_content( _response_types_map: Dict[str, Optional[str]] = { '400': "object", '500': "object", - '200': "object", + '200': "AicusapicohQbFv37cvtQS", } response_data = self.api_client.call_api( *_param, @@ -2875,6 +2933,7 @@ def chatbot_management_chatbots_post_without_preload_content( def _chatbot_management_chatbots_post_serialize( self, + aicusapicob_mn2p_lk9_av_e8, _request_auth, _content_type, _headers, @@ -2898,6 +2957,8 @@ def _chatbot_management_chatbots_post_serialize( # process the header parameters # process the form parameters # process the body parameter + if aicusapicob_mn2p_lk9_av_e8 is not None: + _body_params = aicusapicob_mn2p_lk9_av_e8 # set the HTTP header `Accept` @@ -2907,10 +2968,23 @@ def _chatbot_management_chatbots_post_serialize( ] ) + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type # authentication setting _auth_settings: List[str] = [ - 'intelliagentapiconstructApiAuthorizerFB94A0DF' + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' ] return self.api_client.param_serialize( @@ -2932,7 +3006,7 @@ def _chatbot_management_chatbots_post_serialize( @validate_call - def chatbot_management_options( + def chatbot_management_check_chatbot_options( self, _request_timeout: Union[ None, @@ -2947,7 +3021,7 @@ def chatbot_management_options( _headers: Optional[Dict[StrictStr, Any]] = 
None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> None: - """chatbot_management_options + """chatbot_management_check_chatbot_options :param _request_timeout: timeout setting for this request. If one @@ -2972,7 +3046,7 @@ def chatbot_management_options( :return: Returns the result object. """ # noqa: E501 - _param = self._chatbot_management_options_serialize( + _param = self._chatbot_management_check_chatbot_options_serialize( _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -2994,7 +3068,7 @@ def chatbot_management_options( @validate_call - def chatbot_management_options_with_http_info( + def chatbot_management_check_chatbot_options_with_http_info( self, _request_timeout: Union[ None, @@ -3009,7 +3083,7 @@ def chatbot_management_options_with_http_info( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> ApiResponse[None]: - """chatbot_management_options + """chatbot_management_check_chatbot_options :param _request_timeout: timeout setting for this request. If one @@ -3034,7 +3108,7 @@ def chatbot_management_options_with_http_info( :return: Returns the result object. 
""" # noqa: E501 - _param = self._chatbot_management_options_serialize( + _param = self._chatbot_management_check_chatbot_options_serialize( _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -3056,7 +3130,7 @@ def chatbot_management_options_with_http_info( @validate_call - def chatbot_management_options_without_preload_content( + def chatbot_management_check_chatbot_options_without_preload_content( self, _request_timeout: Union[ None, @@ -3071,7 +3145,7 @@ def chatbot_management_options_without_preload_content( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> RESTResponseType: - """chatbot_management_options + """chatbot_management_check_chatbot_options :param _request_timeout: timeout setting for this request. If one @@ -3096,7 +3170,7 @@ def chatbot_management_options_without_preload_content( :return: Returns the result object. """ # noqa: E501 - _param = self._chatbot_management_options_serialize( + _param = self._chatbot_management_check_chatbot_options_serialize( _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -3113,7 +3187,7 @@ def chatbot_management_options_without_preload_content( return response_data.response - def _chatbot_management_options_serialize( + def _chatbot_management_check_chatbot_options_serialize( self, _request_auth, _content_type, @@ -3148,7 +3222,7 @@ def _chatbot_management_options_serialize( return self.api_client.param_serialize( method='OPTIONS', - resource_path='/chatbot-management', + resource_path='/chatbot-management/check-chatbot', path_params=_path_params, query_params=_query_params, header_params=_header_params, @@ -3165,7 +3239,7 @@ def _chatbot_management_options_serialize( @validate_call - def extract_options( + def chatbot_management_check_chatbot_post( self, _request_timeout: Union[ None, @@ -3179,8 +3253,8 @@ def extract_options( _content_type: Optional[StrictStr] = None, _headers: 
Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> None: - """extract_options + ) -> object: + """chatbot_management_check_chatbot_post :param _request_timeout: timeout setting for this request. If one @@ -3205,7 +3279,7 @@ def extract_options( :return: Returns the result object. """ # noqa: E501 - _param = self._extract_options_serialize( + _param = self._chatbot_management_check_chatbot_post_serialize( _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -3213,7 +3287,9 @@ def extract_options( ) _response_types_map: Dict[str, Optional[str]] = { - '204': None, + '400': "object", + '500': "object", + '200': "object", } response_data = self.api_client.call_api( *_param, @@ -3227,7 +3303,7 @@ def extract_options( @validate_call - def extract_options_with_http_info( + def chatbot_management_check_chatbot_post_with_http_info( self, _request_timeout: Union[ None, @@ -3241,8 +3317,8 @@ def extract_options_with_http_info( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[None]: - """extract_options + ) -> ApiResponse[object]: + """chatbot_management_check_chatbot_post :param _request_timeout: timeout setting for this request. If one @@ -3267,7 +3343,7 @@ def extract_options_with_http_info( :return: Returns the result object. 
""" # noqa: E501 - _param = self._extract_options_serialize( + _param = self._chatbot_management_check_chatbot_post_serialize( _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -3275,7 +3351,9 @@ def extract_options_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { - '204': None, + '400': "object", + '500': "object", + '200': "object", } response_data = self.api_client.call_api( *_param, @@ -3289,7 +3367,7 @@ def extract_options_with_http_info( @validate_call - def extract_options_without_preload_content( + def chatbot_management_check_chatbot_post_without_preload_content( self, _request_timeout: Union[ None, @@ -3304,7 +3382,7 @@ def extract_options_without_preload_content( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> RESTResponseType: - """extract_options + """chatbot_management_check_chatbot_post :param _request_timeout: timeout setting for this request. If one @@ -3329,7 +3407,7 @@ def extract_options_without_preload_content( :return: Returns the result object. 
""" # noqa: E501 - _param = self._extract_options_serialize( + _param = self._chatbot_management_check_chatbot_post_serialize( _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -3337,7 +3415,9 @@ def extract_options_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { - '204': None, + '400': "object", + '500': "object", + '200': "object", } response_data = self.api_client.call_api( *_param, @@ -3346,7 +3426,7 @@ def extract_options_without_preload_content( return response_data.response - def _extract_options_serialize( + def _chatbot_management_check_chatbot_post_serialize( self, _request_auth, _content_type, @@ -3373,15 +3453,22 @@ def _extract_options_serialize( # process the body parameter + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) # authentication setting _auth_settings: List[str] = [ + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' ] return self.api_client.param_serialize( - method='OPTIONS', - resource_path='/extract', + method='POST', + resource_path='/chatbot-management/check-chatbot', path_params=_path_params, query_params=_query_params, header_params=_header_params, @@ -3398,7 +3485,7 @@ def _extract_options_serialize( @validate_call - def extract_post( + def chatbot_management_check_default_chatbot_get( self, _request_timeout: Union[ None, @@ -3413,7 +3500,7 @@ def extract_post( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> object: - """extract_post + """chatbot_management_check_default_chatbot_get :param _request_timeout: timeout setting for this request. If one @@ -3438,7 +3525,7 @@ def extract_post( :return: Returns the result object. 
""" # noqa: E501 - _param = self._extract_post_serialize( + _param = self._chatbot_management_check_default_chatbot_get_serialize( _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -3462,7 +3549,7 @@ def extract_post( @validate_call - def extract_post_with_http_info( + def chatbot_management_check_default_chatbot_get_with_http_info( self, _request_timeout: Union[ None, @@ -3477,7 +3564,7 @@ def extract_post_with_http_info( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> ApiResponse[object]: - """extract_post + """chatbot_management_check_default_chatbot_get :param _request_timeout: timeout setting for this request. If one @@ -3502,7 +3589,7 @@ def extract_post_with_http_info( :return: Returns the result object. """ # noqa: E501 - _param = self._extract_post_serialize( + _param = self._chatbot_management_check_default_chatbot_get_serialize( _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -3526,7 +3613,7 @@ def extract_post_with_http_info( @validate_call - def extract_post_without_preload_content( + def chatbot_management_check_default_chatbot_get_without_preload_content( self, _request_timeout: Union[ None, @@ -3541,7 +3628,7 @@ def extract_post_without_preload_content( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> RESTResponseType: - """extract_post + """chatbot_management_check_default_chatbot_get :param _request_timeout: timeout setting for this request. If one @@ -3566,7 +3653,7 @@ def extract_post_without_preload_content( :return: Returns the result object. 
""" # noqa: E501 - _param = self._extract_post_serialize( + _param = self._chatbot_management_check_default_chatbot_get_serialize( _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -3585,7 +3672,7 @@ def extract_post_without_preload_content( return response_data.response - def _extract_post_serialize( + def _chatbot_management_check_default_chatbot_get_serialize( self, _request_auth, _content_type, @@ -3622,12 +3709,4691 @@ def _extract_post_serialize( # authentication setting _auth_settings: List[str] = [ - 'intelliagentapiconstructApiAuthorizerFB94A0DF' + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' ] return self.api_client.param_serialize( - method='POST', - resource_path='/extract', + method='GET', + resource_path='/chatbot-management/check-default-chatbot', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def chatbot_management_check_default_chatbot_options( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """chatbot_management_check_default_chatbot_options + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._chatbot_management_check_default_chatbot_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def chatbot_management_check_default_chatbot_options_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """chatbot_management_check_default_chatbot_options + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._chatbot_management_check_default_chatbot_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def chatbot_management_check_default_chatbot_options_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """chatbot_management_check_default_chatbot_options + + + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._chatbot_management_check_default_chatbot_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _chatbot_management_check_default_chatbot_options_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + + + # authentication setting + 
_auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='OPTIONS', + resource_path='/chatbot-management/check-default-chatbot', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def chatbot_management_embeddings_get( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """chatbot_management_embeddings_get + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._chatbot_management_embeddings_get_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def chatbot_management_embeddings_get_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[object]: + """chatbot_management_embeddings_get + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._chatbot_management_embeddings_get_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def chatbot_management_embeddings_get_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """chatbot_management_embeddings_get + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._chatbot_management_embeddings_get_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _chatbot_management_embeddings_get_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/chatbot-management/embeddings', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + 
+ @validate_call + def chatbot_management_embeddings_options( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """chatbot_management_embeddings_options + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._chatbot_management_embeddings_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def chatbot_management_embeddings_options_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """chatbot_management_embeddings_options + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._chatbot_management_embeddings_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def chatbot_management_embeddings_options_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """chatbot_management_embeddings_options + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._chatbot_management_embeddings_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _chatbot_management_embeddings_options_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='OPTIONS', + resource_path='/chatbot-management/embeddings', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def chatbot_management_options( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, 
Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """chatbot_management_options + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._chatbot_management_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def chatbot_management_options_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """chatbot_management_options + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._chatbot_management_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def chatbot_management_options_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """chatbot_management_options + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._chatbot_management_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _chatbot_management_options_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='OPTIONS', + resource_path='/chatbot-management', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def extract_options( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """extract_options + + + :param _request_timeout: timeout 
setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._extract_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def extract_options_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """extract_options + + + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._extract_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def extract_options_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """extract_options + + + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._extract_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _extract_options_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return 
self.api_client.param_serialize( + method='OPTIONS', + resource_path='/extract', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def extract_post( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """extract_post + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._extract_post_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def extract_post_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[object]: + """extract_post + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._extract_post_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def extract_post_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """extract_post + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._extract_post_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _extract_post_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/extract', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def intention_download_template_get( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: 
Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """intention_download_template_get + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._intention_download_template_get_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def intention_download_template_get_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[object]: + """intention_download_template_get + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_download_template_get_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def intention_download_template_get_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """intention_download_template_get + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_download_template_get_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _intention_download_template_get_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/intention/download-template', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + 
@validate_call + def intention_download_template_options( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """intention_download_template_options + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._intention_download_template_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def intention_download_template_options_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """intention_download_template_options + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_download_template_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def intention_download_template_options_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """intention_download_template_options + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_download_template_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _intention_download_template_options_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='OPTIONS', + resource_path='/intention/download-template', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def intention_execution_presigned_url_options( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, 
Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """intention_execution_presigned_url_options + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._intention_execution_presigned_url_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def intention_execution_presigned_url_options_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """intention_execution_presigned_url_options + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_execution_presigned_url_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def intention_execution_presigned_url_options_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """intention_execution_presigned_url_options + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_execution_presigned_url_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _intention_execution_presigned_url_options_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='OPTIONS', + resource_path='/intention/execution-presigned-url', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def intention_execution_presigned_url_post( + self, + aicusapico_cyd129_m65y_kv: AicusapicoCyd129M65yKV, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + 
Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> AicusapicoUy1YBXiWJ5Aq: + """intention_execution_presigned_url_post + + + :param aicusapico_cyd129_m65y_kv: (required) + :type aicusapico_cyd129_m65y_kv: AicusapicoCyd129M65yKV + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._intention_execution_presigned_url_post_serialize( + aicusapico_cyd129_m65y_kv=aicusapico_cyd129_m65y_kv, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "AicusapicoUy1YBXiWJ5Aq", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def intention_execution_presigned_url_post_with_http_info( + self, + aicusapico_cyd129_m65y_kv: AicusapicoCyd129M65yKV, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[AicusapicoUy1YBXiWJ5Aq]: + """intention_execution_presigned_url_post + + + :param aicusapico_cyd129_m65y_kv: (required) + :type aicusapico_cyd129_m65y_kv: AicusapicoCyd129M65yKV + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_execution_presigned_url_post_serialize( + aicusapico_cyd129_m65y_kv=aicusapico_cyd129_m65y_kv, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "AicusapicoUy1YBXiWJ5Aq", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def intention_execution_presigned_url_post_without_preload_content( + self, + aicusapico_cyd129_m65y_kv: AicusapicoCyd129M65yKV, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """intention_execution_presigned_url_post + + + :param aicusapico_cyd129_m65y_kv: (required) + :type aicusapico_cyd129_m65y_kv: AicusapicoCyd129M65yKV + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_execution_presigned_url_post_serialize( + aicusapico_cyd129_m65y_kv=aicusapico_cyd129_m65y_kv, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "AicusapicoUy1YBXiWJ5Aq", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _intention_execution_presigned_url_post_serialize( + self, + aicusapico_cyd129_m65y_kv, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if aicusapico_cyd129_m65y_kv is not None: 
+ _body_params = aicusapico_cyd129_m65y_kv + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/intention/execution-presigned-url', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def intention_executions_delete( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """intention_executions_delete + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_executions_delete_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def intention_executions_delete_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[object]: + """intention_executions_delete + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_executions_delete_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def intention_executions_delete_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """intention_executions_delete + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_executions_delete_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _intention_executions_delete_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = 
self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/intention/executions', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def intention_executions_execution_id_get( + self, + intention_id: StrictStr, + execution_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Aicusapico35klzY80ikPh: + """intention_executions_execution_id_get + + + :param intention_id: (required) + :type intention_id: str + :param execution_id: (required) + :type execution_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_executions_execution_id_get_serialize( + intention_id=intention_id, + execution_id=execution_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "Aicusapico35klzY80ikPh", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def intention_executions_execution_id_get_with_http_info( + self, + intention_id: StrictStr, + execution_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Aicusapico35klzY80ikPh]: + """intention_executions_execution_id_get + + + :param intention_id: (required) + :type intention_id: str + :param execution_id: (required) + :type execution_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_executions_execution_id_get_serialize( + intention_id=intention_id, + execution_id=execution_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "Aicusapico35klzY80ikPh", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def intention_executions_execution_id_get_without_preload_content( + self, + intention_id: StrictStr, + execution_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """intention_executions_execution_id_get 
+ + + :param intention_id: (required) + :type intention_id: str + :param execution_id: (required) + :type execution_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._intention_executions_execution_id_get_serialize( + intention_id=intention_id, + execution_id=execution_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "Aicusapico35klzY80ikPh", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _intention_executions_execution_id_get_serialize( + self, + intention_id, + execution_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if intention_id is not None: + _path_params['intentionId'] = intention_id + if execution_id is not None: + _path_params['executionId'] = execution_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/intention/executions/{executionId}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth 
+ ) + + + + + @validate_call + def intention_executions_execution_id_options( + self, + execution_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """intention_executions_execution_id_options + + + :param execution_id: (required) + :type execution_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._intention_executions_execution_id_options_serialize( + execution_id=execution_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def intention_executions_execution_id_options_with_http_info( + self, + execution_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """intention_executions_execution_id_options + + + :param execution_id: (required) + :type execution_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_executions_execution_id_options_serialize( + execution_id=execution_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def intention_executions_execution_id_options_without_preload_content( + self, + execution_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """intention_executions_execution_id_options + + + :param execution_id: (required) + :type execution_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_executions_execution_id_options_serialize( + execution_id=execution_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _intention_executions_execution_id_options_serialize( + self, + execution_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if execution_id is not None: + _path_params['executionId'] = execution_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='OPTIONS', + resource_path='/intention/executions/{executionId}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + 
collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def intention_executions_get( + self, + page_size: Optional[StrictStr] = None, + max_items: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Aicusapicob9jxGQ8zv1AS: + """intention_executions_get + + + :param page_size: + :type page_size: str + :param max_items: + :type max_items: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._intention_executions_get_serialize( + page_size=page_size, + max_items=max_items, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "Aicusapicob9jxGQ8zv1AS", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def intention_executions_get_with_http_info( + self, + page_size: Optional[StrictStr] = None, + max_items: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Aicusapicob9jxGQ8zv1AS]: + """intention_executions_get + + + :param page_size: + :type page_size: str + :param max_items: + :type max_items: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_executions_get_serialize( + page_size=page_size, + max_items=max_items, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "Aicusapicob9jxGQ8zv1AS", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def intention_executions_get_without_preload_content( + self, + page_size: Optional[StrictStr] = None, + max_items: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """intention_executions_get + + + :param page_size: + :type page_size: str + :param max_items: + :type max_items: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_executions_get_serialize( + page_size=page_size, + max_items=max_items, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "Aicusapicob9jxGQ8zv1AS", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _intention_executions_get_serialize( + self, + page_size, + max_items, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if page_size is not None: + + _query_params.append(('page_size', page_size)) + + if max_items is not None: + + _query_params.append(('max_items', max_items)) + + # process the 
header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/intention/executions', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def intention_executions_options( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """intention_executions_options + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_executions_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def intention_executions_options_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """intention_executions_options + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_executions_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def intention_executions_options_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """intention_executions_options + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_executions_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _intention_executions_options_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='OPTIONS', + resource_path='/intention/executions', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def intention_executions_post( + self, + aicusapico_eoc_l_nul8cwxa: AicusapicoEOcLNul8cwxa, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + 
Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Aicusapico4rwMspzeBOe5: + """intention_executions_post + + + :param aicusapico_eoc_l_nul8cwxa: (required) + :type aicusapico_eoc_l_nul8cwxa: AicusapicoEOcLNul8cwxa + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._intention_executions_post_serialize( + aicusapico_eoc_l_nul8cwxa=aicusapico_eoc_l_nul8cwxa, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "Aicusapico4rwMspzeBOe5", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def intention_executions_post_with_http_info( + self, + aicusapico_eoc_l_nul8cwxa: AicusapicoEOcLNul8cwxa, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Aicusapico4rwMspzeBOe5]: + """intention_executions_post + + + :param aicusapico_eoc_l_nul8cwxa: (required) + :type aicusapico_eoc_l_nul8cwxa: AicusapicoEOcLNul8cwxa + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_executions_post_serialize( + aicusapico_eoc_l_nul8cwxa=aicusapico_eoc_l_nul8cwxa, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "Aicusapico4rwMspzeBOe5", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def intention_executions_post_without_preload_content( + self, + aicusapico_eoc_l_nul8cwxa: AicusapicoEOcLNul8cwxa, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """intention_executions_post + + + :param aicusapico_eoc_l_nul8cwxa: (required) + :type aicusapico_eoc_l_nul8cwxa: AicusapicoEOcLNul8cwxa + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_executions_post_serialize( + aicusapico_eoc_l_nul8cwxa=aicusapico_eoc_l_nul8cwxa, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "Aicusapico4rwMspzeBOe5", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _intention_executions_post_serialize( + self, + aicusapico_eoc_l_nul8cwxa, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if aicusapico_eoc_l_nul8cwxa is not None: + _body_params = 
aicusapico_eoc_l_nul8cwxa + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/intention/executions', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def intention_index_used_scan_options( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """intention_index_used_scan_options + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_index_used_scan_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def intention_index_used_scan_options_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """intention_index_used_scan_options + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_index_used_scan_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def intention_index_used_scan_options_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """intention_index_used_scan_options + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_index_used_scan_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _intention_index_used_scan_options_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + 
method='OPTIONS', + resource_path='/intention/index-used-scan', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def intention_index_used_scan_post( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """intention_index_used_scan_post + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._intention_index_used_scan_post_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def intention_index_used_scan_post_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[object]: + """intention_index_used_scan_post + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_index_used_scan_post_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def intention_index_used_scan_post_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """intention_index_used_scan_post + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._intention_index_used_scan_post_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '400': "object", + '500': "object", + '200': "object", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _intention_index_used_scan_post_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/intention/index-used-scan', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + 
@validate_call + def intention_options( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """intention_options + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._intention_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def intention_options_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """intention_options + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._intention_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def intention_options_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """intention_options + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._intention_options_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '204': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _intention_options_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='OPTIONS', + resource_path='/intention', path_params=_path_params, query_params=_query_params, header_params=_header_params, @@ -3646,7 +8412,7 @@ def _extract_post_serialize( @validate_call def knowledge_base_executions_delete( self, - intellapico_h4_a9yvm8c1p3: IntellapicoH4A9yvm8c1p3, + aicusapicor1_kt5_c2m_lnkm: Aicusapicor1Kt5C2mLnkm, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -3659,12 +8425,12 @@ def knowledge_base_executions_delete( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> IntellapicoNbA0nyPxxk6q: + ) -> Aicusapicoqew7t5vTA2ak: """knowledge_base_executions_delete - :param intellapico_h4_a9yvm8c1p3: (required) - :type intellapico_h4_a9yvm8c1p3: IntellapicoH4A9yvm8c1p3 + :param aicusapicor1_kt5_c2m_lnkm: 
(required) + :type aicusapicor1_kt5_c2m_lnkm: Aicusapicor1Kt5C2mLnkm :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -3688,7 +8454,7 @@ def knowledge_base_executions_delete( """ # noqa: E501 _param = self._knowledge_base_executions_delete_serialize( - intellapico_h4_a9yvm8c1p3=intellapico_h4_a9yvm8c1p3, + aicusapicor1_kt5_c2m_lnkm=aicusapicor1_kt5_c2m_lnkm, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -3698,7 +8464,7 @@ def knowledge_base_executions_delete( _response_types_map: Dict[str, Optional[str]] = { '400': "object", '500': "object", - '200': "IntellapicoNbA0nyPxxk6q", + '200': "Aicusapicoqew7t5vTA2ak", } response_data = self.api_client.call_api( *_param, @@ -3714,7 +8480,7 @@ def knowledge_base_executions_delete( @validate_call def knowledge_base_executions_delete_with_http_info( self, - intellapico_h4_a9yvm8c1p3: IntellapicoH4A9yvm8c1p3, + aicusapicor1_kt5_c2m_lnkm: Aicusapicor1Kt5C2mLnkm, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -3727,12 +8493,12 @@ def knowledge_base_executions_delete_with_http_info( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[IntellapicoNbA0nyPxxk6q]: + ) -> ApiResponse[Aicusapicoqew7t5vTA2ak]: """knowledge_base_executions_delete - :param intellapico_h4_a9yvm8c1p3: (required) - :type intellapico_h4_a9yvm8c1p3: IntellapicoH4A9yvm8c1p3 + :param aicusapicor1_kt5_c2m_lnkm: (required) + :type aicusapicor1_kt5_c2m_lnkm: Aicusapicor1Kt5C2mLnkm :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -3756,7 +8522,7 @@ def knowledge_base_executions_delete_with_http_info( """ # noqa: E501 _param = self._knowledge_base_executions_delete_serialize( - intellapico_h4_a9yvm8c1p3=intellapico_h4_a9yvm8c1p3, + aicusapicor1_kt5_c2m_lnkm=aicusapicor1_kt5_c2m_lnkm, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -3766,7 +8532,7 @@ def knowledge_base_executions_delete_with_http_info( _response_types_map: Dict[str, Optional[str]] = { '400': "object", '500': "object", - '200': "IntellapicoNbA0nyPxxk6q", + '200': "Aicusapicoqew7t5vTA2ak", } response_data = self.api_client.call_api( *_param, @@ -3782,7 +8548,7 @@ def knowledge_base_executions_delete_with_http_info( @validate_call def knowledge_base_executions_delete_without_preload_content( self, - intellapico_h4_a9yvm8c1p3: IntellapicoH4A9yvm8c1p3, + aicusapicor1_kt5_c2m_lnkm: Aicusapicor1Kt5C2mLnkm, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -3799,8 +8565,8 @@ def knowledge_base_executions_delete_without_preload_content( """knowledge_base_executions_delete - :param intellapico_h4_a9yvm8c1p3: (required) - :type intellapico_h4_a9yvm8c1p3: IntellapicoH4A9yvm8c1p3 + :param aicusapicor1_kt5_c2m_lnkm: (required) + :type aicusapicor1_kt5_c2m_lnkm: Aicusapicor1Kt5C2mLnkm :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -3824,7 +8590,7 @@ def knowledge_base_executions_delete_without_preload_content( """ # noqa: E501 _param = self._knowledge_base_executions_delete_serialize( - intellapico_h4_a9yvm8c1p3=intellapico_h4_a9yvm8c1p3, + aicusapicor1_kt5_c2m_lnkm=aicusapicor1_kt5_c2m_lnkm, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -3834,7 +8600,7 @@ def knowledge_base_executions_delete_without_preload_content( _response_types_map: Dict[str, Optional[str]] = { '400': "object", '500': "object", - '200': "IntellapicoNbA0nyPxxk6q", + '200': "Aicusapicoqew7t5vTA2ak", } response_data = self.api_client.call_api( *_param, @@ -3845,7 +8611,7 @@ def knowledge_base_executions_delete_without_preload_content( def _knowledge_base_executions_delete_serialize( self, - intellapico_h4_a9yvm8c1p3, + aicusapicor1_kt5_c2m_lnkm, _request_auth, _content_type, _headers, @@ -3869,8 +8635,8 @@ def _knowledge_base_executions_delete_serialize( # process the header parameters # process the form parameters # process the body parameter - if intellapico_h4_a9yvm8c1p3 is not None: - _body_params = intellapico_h4_a9yvm8c1p3 + if aicusapicor1_kt5_c2m_lnkm is not None: + _body_params = aicusapicor1_kt5_c2m_lnkm # set the HTTP header `Accept` @@ -3896,7 +8662,7 @@ def _knowledge_base_executions_delete_serialize( # authentication setting _auth_settings: List[str] = [ - 'intelliagentapiconstructApiAuthorizerFB94A0DF' + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' ] return self.api_client.param_serialize( @@ -3933,7 +8699,7 @@ def knowledge_base_executions_execution_id_get( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> IntellapicowXaFAEWeTgPt: + ) -> AicusapicoQjcoKzzZFI86: """knowledge_base_executions_execution_id_get @@ -3972,7 +8738,7 @@ def knowledge_base_executions_execution_id_get( _response_types_map: Dict[str, Optional[str]] = { 
'400': "object", '500': "object", - '200': "IntellapicowXaFAEWeTgPt", + '200': "AicusapicoQjcoKzzZFI86", } response_data = self.api_client.call_api( *_param, @@ -4001,7 +8767,7 @@ def knowledge_base_executions_execution_id_get_with_http_info( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[IntellapicowXaFAEWeTgPt]: + ) -> ApiResponse[AicusapicoQjcoKzzZFI86]: """knowledge_base_executions_execution_id_get @@ -4040,7 +8806,7 @@ def knowledge_base_executions_execution_id_get_with_http_info( _response_types_map: Dict[str, Optional[str]] = { '400': "object", '500': "object", - '200': "IntellapicowXaFAEWeTgPt", + '200': "AicusapicoQjcoKzzZFI86", } response_data = self.api_client.call_api( *_param, @@ -4108,7 +8874,7 @@ def knowledge_base_executions_execution_id_get_without_preload_content( _response_types_map: Dict[str, Optional[str]] = { '400': "object", '500': "object", - '200': "IntellapicowXaFAEWeTgPt", + '200': "AicusapicoQjcoKzzZFI86", } response_data = self.api_client.call_api( *_param, @@ -4157,7 +8923,7 @@ def _knowledge_base_executions_execution_id_get_serialize( # authentication setting _auth_settings: List[str] = [ - 'intelliagentapiconstructApiAuthorizerFB94A0DF' + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' ] return self.api_client.param_serialize( @@ -4443,7 +9209,7 @@ def knowledge_base_executions_get( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> IntellapicorVOJKT5wIzUC: + ) -> Aicusapico2TwvXbhsTncy: """knowledge_base_executions_get @@ -4485,7 +9251,7 @@ def knowledge_base_executions_get( _response_types_map: Dict[str, Optional[str]] = { '400': "object", '500': "object", - '200': "IntellapicorVOJKT5wIzUC", + '200': "Aicusapico2TwvXbhsTncy", } response_data = self.api_client.call_api( *_param, @@ -4515,7 +9281,7 @@ def 
knowledge_base_executions_get_with_http_info( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[IntellapicorVOJKT5wIzUC]: + ) -> ApiResponse[Aicusapico2TwvXbhsTncy]: """knowledge_base_executions_get @@ -4557,7 +9323,7 @@ def knowledge_base_executions_get_with_http_info( _response_types_map: Dict[str, Optional[str]] = { '400': "object", '500': "object", - '200': "IntellapicorVOJKT5wIzUC", + '200': "Aicusapico2TwvXbhsTncy", } response_data = self.api_client.call_api( *_param, @@ -4629,7 +9395,7 @@ def knowledge_base_executions_get_without_preload_content( _response_types_map: Dict[str, Optional[str]] = { '400': "object", '500': "object", - '200': "IntellapicorVOJKT5wIzUC", + '200': "Aicusapico2TwvXbhsTncy", } response_data = self.api_client.call_api( *_param, @@ -4685,7 +9451,7 @@ def _knowledge_base_executions_get_serialize( # authentication setting _auth_settings: List[str] = [ - 'intelliagentapiconstructApiAuthorizerFB94A0DF' + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' ] return self.api_client.param_serialize( @@ -4942,7 +9708,7 @@ def _knowledge_base_executions_options_serialize( @validate_call def knowledge_base_executions_post( self, - intellapico_nk9o_lf1_k1uex: IntellapicoNK9oLf1K1uex, + aicusapicose_oar_xmrp_sns: AicusapicoseOArXMRpSNs, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -4959,8 +9725,8 @@ def knowledge_base_executions_post( """knowledge_base_executions_post - :param intellapico_nk9o_lf1_k1uex: (required) - :type intellapico_nk9o_lf1_k1uex: IntellapicoNK9oLf1K1uex + :param aicusapicose_oar_xmrp_sns: (required) + :type aicusapicose_oar_xmrp_sns: AicusapicoseOArXMRpSNs :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -4984,7 +9750,7 @@ def knowledge_base_executions_post( """ # noqa: E501 _param = self._knowledge_base_executions_post_serialize( - intellapico_nk9o_lf1_k1uex=intellapico_nk9o_lf1_k1uex, + aicusapicose_oar_xmrp_sns=aicusapicose_oar_xmrp_sns, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -5010,7 +9776,7 @@ def knowledge_base_executions_post( @validate_call def knowledge_base_executions_post_with_http_info( self, - intellapico_nk9o_lf1_k1uex: IntellapicoNK9oLf1K1uex, + aicusapicose_oar_xmrp_sns: AicusapicoseOArXMRpSNs, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -5027,8 +9793,8 @@ def knowledge_base_executions_post_with_http_info( """knowledge_base_executions_post - :param intellapico_nk9o_lf1_k1uex: (required) - :type intellapico_nk9o_lf1_k1uex: IntellapicoNK9oLf1K1uex + :param aicusapicose_oar_xmrp_sns: (required) + :type aicusapicose_oar_xmrp_sns: AicusapicoseOArXMRpSNs :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -5052,7 +9818,7 @@ def knowledge_base_executions_post_with_http_info( """ # noqa: E501 _param = self._knowledge_base_executions_post_serialize( - intellapico_nk9o_lf1_k1uex=intellapico_nk9o_lf1_k1uex, + aicusapicose_oar_xmrp_sns=aicusapicose_oar_xmrp_sns, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -5078,7 +9844,7 @@ def knowledge_base_executions_post_with_http_info( @validate_call def knowledge_base_executions_post_without_preload_content( self, - intellapico_nk9o_lf1_k1uex: IntellapicoNK9oLf1K1uex, + aicusapicose_oar_xmrp_sns: AicusapicoseOArXMRpSNs, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -5095,8 +9861,8 @@ def knowledge_base_executions_post_without_preload_content( """knowledge_base_executions_post - :param intellapico_nk9o_lf1_k1uex: (required) - :type intellapico_nk9o_lf1_k1uex: IntellapicoNK9oLf1K1uex + :param aicusapicose_oar_xmrp_sns: (required) + :type aicusapicose_oar_xmrp_sns: AicusapicoseOArXMRpSNs :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -5120,7 +9886,7 @@ def knowledge_base_executions_post_without_preload_content( """ # noqa: E501 _param = self._knowledge_base_executions_post_serialize( - intellapico_nk9o_lf1_k1uex=intellapico_nk9o_lf1_k1uex, + aicusapicose_oar_xmrp_sns=aicusapicose_oar_xmrp_sns, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -5141,7 +9907,7 @@ def knowledge_base_executions_post_without_preload_content( def _knowledge_base_executions_post_serialize( self, - intellapico_nk9o_lf1_k1uex, + aicusapicose_oar_xmrp_sns, _request_auth, _content_type, _headers, @@ -5165,8 +9931,8 @@ def _knowledge_base_executions_post_serialize( # process the header parameters # process the form parameters # process the body parameter - if intellapico_nk9o_lf1_k1uex is not None: - _body_params = intellapico_nk9o_lf1_k1uex + if aicusapicose_oar_xmrp_sns is not None: + _body_params = aicusapicose_oar_xmrp_sns # set the HTTP header `Accept` @@ -5192,7 +9958,7 @@ def _knowledge_base_executions_post_serialize( # authentication setting _auth_settings: List[str] = [ - 'intelliagentapiconstructApiAuthorizerFB94A0DF' + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' ] return self.api_client.param_serialize( @@ -5449,7 +10215,7 @@ def _knowledge_base_kb_presigned_url_options_serialize( @validate_call def knowledge_base_kb_presigned_url_post( self, - intellapicormo5_lbzxs9_rb: Intellapicormo5LBZXS9Rb, + aicusapico51_raf_cay_oxi_z: Aicusapico51RafCAYOxiZ, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -5462,12 +10228,12 @@ def knowledge_base_kb_presigned_url_post( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> IntellapicoXeXaUMjaXtPx: + ) -> Aicusapicoh5w3FRwxBjhG: """knowledge_base_kb_presigned_url_post - :param intellapicormo5_lbzxs9_rb: (required) - :type intellapicormo5_lbzxs9_rb: Intellapicormo5LBZXS9Rb + 
:param aicusapico51_raf_cay_oxi_z: (required) + :type aicusapico51_raf_cay_oxi_z: Aicusapico51RafCAYOxiZ :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -5491,7 +10257,7 @@ def knowledge_base_kb_presigned_url_post( """ # noqa: E501 _param = self._knowledge_base_kb_presigned_url_post_serialize( - intellapicormo5_lbzxs9_rb=intellapicormo5_lbzxs9_rb, + aicusapico51_raf_cay_oxi_z=aicusapico51_raf_cay_oxi_z, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -5501,7 +10267,7 @@ def knowledge_base_kb_presigned_url_post( _response_types_map: Dict[str, Optional[str]] = { '400': "object", '500': "object", - '200': "IntellapicoXeXaUMjaXtPx", + '200': "Aicusapicoh5w3FRwxBjhG", } response_data = self.api_client.call_api( *_param, @@ -5517,7 +10283,7 @@ def knowledge_base_kb_presigned_url_post( @validate_call def knowledge_base_kb_presigned_url_post_with_http_info( self, - intellapicormo5_lbzxs9_rb: Intellapicormo5LBZXS9Rb, + aicusapico51_raf_cay_oxi_z: Aicusapico51RafCAYOxiZ, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -5530,12 +10296,12 @@ def knowledge_base_kb_presigned_url_post_with_http_info( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[IntellapicoXeXaUMjaXtPx]: + ) -> ApiResponse[Aicusapicoh5w3FRwxBjhG]: """knowledge_base_kb_presigned_url_post - :param intellapicormo5_lbzxs9_rb: (required) - :type intellapicormo5_lbzxs9_rb: Intellapicormo5LBZXS9Rb + :param aicusapico51_raf_cay_oxi_z: (required) + :type aicusapico51_raf_cay_oxi_z: Aicusapico51RafCAYOxiZ :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -5559,7 +10325,7 @@ def knowledge_base_kb_presigned_url_post_with_http_info( """ # noqa: E501 _param = self._knowledge_base_kb_presigned_url_post_serialize( - intellapicormo5_lbzxs9_rb=intellapicormo5_lbzxs9_rb, + aicusapico51_raf_cay_oxi_z=aicusapico51_raf_cay_oxi_z, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -5569,7 +10335,7 @@ def knowledge_base_kb_presigned_url_post_with_http_info( _response_types_map: Dict[str, Optional[str]] = { '400': "object", '500': "object", - '200': "IntellapicoXeXaUMjaXtPx", + '200': "Aicusapicoh5w3FRwxBjhG", } response_data = self.api_client.call_api( *_param, @@ -5585,7 +10351,7 @@ def knowledge_base_kb_presigned_url_post_with_http_info( @validate_call def knowledge_base_kb_presigned_url_post_without_preload_content( self, - intellapicormo5_lbzxs9_rb: Intellapicormo5LBZXS9Rb, + aicusapico51_raf_cay_oxi_z: Aicusapico51RafCAYOxiZ, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -5602,8 +10368,8 @@ def knowledge_base_kb_presigned_url_post_without_preload_content( """knowledge_base_kb_presigned_url_post - :param intellapicormo5_lbzxs9_rb: (required) - :type intellapicormo5_lbzxs9_rb: Intellapicormo5LBZXS9Rb + :param aicusapico51_raf_cay_oxi_z: (required) + :type aicusapico51_raf_cay_oxi_z: Aicusapico51RafCAYOxiZ :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -5627,7 +10393,7 @@ def knowledge_base_kb_presigned_url_post_without_preload_content( """ # noqa: E501 _param = self._knowledge_base_kb_presigned_url_post_serialize( - intellapicormo5_lbzxs9_rb=intellapicormo5_lbzxs9_rb, + aicusapico51_raf_cay_oxi_z=aicusapico51_raf_cay_oxi_z, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -5637,7 +10403,7 @@ def knowledge_base_kb_presigned_url_post_without_preload_content( _response_types_map: Dict[str, Optional[str]] = { '400': "object", '500': "object", - '200': "IntellapicoXeXaUMjaXtPx", + '200': "Aicusapicoh5w3FRwxBjhG", } response_data = self.api_client.call_api( *_param, @@ -5648,7 +10414,7 @@ def knowledge_base_kb_presigned_url_post_without_preload_content( def _knowledge_base_kb_presigned_url_post_serialize( self, - intellapicormo5_lbzxs9_rb, + aicusapico51_raf_cay_oxi_z, _request_auth, _content_type, _headers, @@ -5672,8 +10438,8 @@ def _knowledge_base_kb_presigned_url_post_serialize( # process the header parameters # process the form parameters # process the body parameter - if intellapicormo5_lbzxs9_rb is not None: - _body_params = intellapicormo5_lbzxs9_rb + if aicusapico51_raf_cay_oxi_z is not None: + _body_params = aicusapico51_raf_cay_oxi_z # set the HTTP header `Accept` @@ -5699,7 +10465,7 @@ def _knowledge_base_kb_presigned_url_post_serialize( # authentication setting _auth_settings: List[str] = [ - 'intelliagentapiconstructApiAuthorizerFB94A0DF' + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' ] return self.api_client.param_serialize( @@ -6411,7 +11177,7 @@ def _llm_post_serialize( # authentication setting _auth_settings: List[str] = [ - 'intelliagentapiconstructApiAuthorizerFB94A0DF' + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' ] return self.api_client.param_serialize( @@ -6657,7 +11423,7 @@ def _prompt_management_models_get_serialize( # authentication setting _auth_settings: List[str] = [ - 
'intelliagentapiconstructApiAuthorizerFB94A0DF' + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' ] return self.api_client.param_serialize( @@ -7369,7 +12135,7 @@ def _prompt_management_prompts_get_serialize( # authentication setting _auth_settings: List[str] = [ - 'intelliagentapiconstructApiAuthorizerFB94A0DF' + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' ] return self.api_client.param_serialize( @@ -7848,7 +12614,7 @@ def _prompt_management_prompts_post_serialize( # authentication setting _auth_settings: List[str] = [ - 'intelliagentapiconstructApiAuthorizerFB94A0DF' + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' ] return self.api_client.param_serialize( @@ -8094,7 +12860,7 @@ def _prompt_management_scenes_get_serialize( # authentication setting _auth_settings: List[str] = [ - 'intelliagentapiconstructApiAuthorizerFB94A0DF' + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' ] return self.api_client.param_serialize( diff --git a/api_test/biz_logic/rest_api/openapi_client/api_client.py b/api_test/biz_logic/rest_api/openapi_client/api_client.py index d72b4a888..037e911fd 100644 --- a/api_test/biz_logic/rest_api/openapi_client/api_client.py +++ b/api_test/biz_logic/rest_api/openapi_client/api_client.py @@ -1,11 +1,11 @@ # coding: utf-8 """ - Intelli-Agent-RESTful-API + aics-api - Intelli-Agent RESTful API + AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-07-29T06:23:46Z + The version of the OpenAPI document: 2024-10-21T08:32:58Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
diff --git a/api_test/biz_logic/rest_api/openapi_client/configuration.py b/api_test/biz_logic/rest_api/openapi_client/configuration.py index 3981a257a..739debbe1 100644 --- a/api_test/biz_logic/rest_api/openapi_client/configuration.py +++ b/api_test/biz_logic/rest_api/openapi_client/configuration.py @@ -1,11 +1,11 @@ # coding: utf-8 """ - Intelli-Agent-RESTful-API + aics-api - Intelli-Agent RESTful API + AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-07-29T06:23:46Z + The version of the OpenAPI document: 2024-10-21T08:32:58Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. @@ -89,7 +89,7 @@ def __init__(self, host=None, ) -> None: """Constructor """ - self._base_path = "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/prod" if host is None else host + self._base_path = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" if host is None else host """Default Base url """ self.server_index = 0 if server_index is None and host is None else server_index @@ -379,13 +379,13 @@ def auth_settings(self): :return: The Auth Settings information dict. 
""" auth = {} - if 'intelliagentapiconstructApiAuthorizerFB94A0DF' in self.api_key: - auth['intelliagentapiconstructApiAuthorizerFB94A0DF'] = { + if 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC' in self.api_key: + auth['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = { 'type': 'api_key', 'in': 'header', 'key': 'Authorization', 'value': self.get_api_key_with_prefix( - 'intelliagentapiconstructApiAuthorizerFB94A0DF', + 'aicustomerserviceapiconstructApiAuthorizerEB0B49FC', ), } return auth @@ -398,7 +398,7 @@ def to_debug_report(self): return "Python SDK Debug Report:\n"\ "OS: {env}\n"\ "Python Version: {pyversion}\n"\ - "Version of the API: 2024-07-29T06:23:46Z\n"\ + "Version of the API: 2024-10-21T08:32:58Z\n"\ "SDK Package Version: 1.0.0".\ format(env=sys.platform, pyversion=sys.version) @@ -409,7 +409,7 @@ def get_host_settings(self): """ return [ { - 'url': "https://14ixphvl88.execute-api.us-east-1.amazonaws.com/{basePath}", + 'url': "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/{basePath}", 'description': "No description provided", 'variables': { 'basePath': { diff --git a/api_test/biz_logic/rest_api/openapi_client/exceptions.py b/api_test/biz_logic/rest_api/openapi_client/exceptions.py index 9a87cb2fa..95553a825 100644 --- a/api_test/biz_logic/rest_api/openapi_client/exceptions.py +++ b/api_test/biz_logic/rest_api/openapi_client/exceptions.py @@ -1,11 +1,11 @@ # coding: utf-8 """ - Intelli-Agent-RESTful-API + aics-api - Intelli-Agent RESTful API + AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-07-29T06:23:46Z + The version of the OpenAPI document: 2024-10-21T08:32:58Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
diff --git a/api_test/biz_logic/rest_api/openapi_client/models/__init__.py b/api_test/biz_logic/rest_api/openapi_client/models/__init__.py index 3bb1f6cf4..2d63e90af 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/__init__.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/__init__.py @@ -2,11 +2,11 @@ # flake8: noqa """ - Intelli-Agent-RESTful-API + aics-api - Intelli-Agent RESTful API + AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-07-29T06:23:46Z + The version of the OpenAPI document: 2024-10-21T08:32:58Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. @@ -14,13 +14,29 @@ # import models into model package -from openapi_client.models.intellapico_h4_a9yvm8c1p3 import IntellapicoH4A9yvm8c1p3 -from openapi_client.models.intellapico_nk9o_lf1_k1uex import IntellapicoNK9oLf1K1uex -from openapi_client.models.intellapico_nb_a0ny_pxxk6q import IntellapicoNbA0nyPxxk6q -from openapi_client.models.intellapico_xe_xa_u_mja_xt_px import IntellapicoXeXaUMjaXtPx -from openapi_client.models.intellapicor_vojkt5w_iz_uc import IntellapicorVOJKT5wIzUC -from openapi_client.models.intellapicor_vojkt5w_iz_uc_config import IntellapicorVOJKT5wIzUCConfig -from openapi_client.models.intellapicor_vojkt5w_iz_uc_items_inner import IntellapicorVOJKT5wIzUCItemsInner -from openapi_client.models.intellapicormo5_lbzxs9_rb import Intellapicormo5LBZXS9Rb -from openapi_client.models.intellapicow_xa_faewe_tg_pt import IntellapicowXaFAEWeTgPt -from openapi_client.models.intellapicow_xa_faewe_tg_pt_items_inner import IntellapicowXaFAEWeTgPtItemsInner +from openapi_client.models.aicusapico2_twv_xbhs_tncy import Aicusapico2TwvXbhsTncy +from openapi_client.models.aicusapico2_twv_xbhs_tncy_config import Aicusapico2TwvXbhsTncyConfig +from openapi_client.models.aicusapico2_twv_xbhs_tncy_items_inner import Aicusapico2TwvXbhsTncyItemsInner +from openapi_client.models.aicusapico35klz_y80ik_ph import 
Aicusapico35klzY80ikPh +from openapi_client.models.aicusapico35klz_y80ik_ph_items_inner import Aicusapico35klzY80ikPhItemsInner +from openapi_client.models.aicusapico35klz_y80ik_ph_items_inner_qa_list_inner import Aicusapico35klzY80ikPhItemsInnerQAListInner +from openapi_client.models.aicusapico4rw_mspze_boe5 import Aicusapico4rwMspzeBOe5 +from openapi_client.models.aicusapico4rw_mspze_boe5_input_payload import Aicusapico4rwMspzeBOe5InputPayload +from openapi_client.models.aicusapico51_raf_cay_oxi_z import Aicusapico51RafCAYOxiZ +from openapi_client.models.aicusapico_cyd129_m65y_kv import AicusapicoCyd129M65yKV +from openapi_client.models.aicusapico_eoc_l_nul8cwxa import AicusapicoEOcLNul8cwxa +from openapi_client.models.aicusapico_qjco_kzz_zfi86 import AicusapicoQjcoKzzZFI86 +from openapi_client.models.aicusapico_qjco_kzz_zfi86_items_inner import AicusapicoQjcoKzzZFI86ItemsInner +from openapi_client.models.aicusapico_uy1_ybxi_wj5_aq import AicusapicoUy1YBXiWJ5Aq +from openapi_client.models.aicusapicob9jx_gq8zv1_as import Aicusapicob9jxGQ8zv1AS +from openapi_client.models.aicusapicob9jx_gq8zv1_as_items_inner import Aicusapicob9jxGQ8zv1ASItemsInner +from openapi_client.models.aicusapicob_mn2p_lk9_av_e8 import AicusapicobMN2pLK9AvE8 +from openapi_client.models.aicusapicob_mn2p_lk9_av_e8_index import AicusapicobMN2pLK9AvE8Index +from openapi_client.models.aicusapicoh5w3_f_rwx_bjh_g import Aicusapicoh5w3FRwxBjhG +from openapi_client.models.aicusapicoh_qb_fv37cvt_qs import AicusapicohQbFv37cvtQS +from openapi_client.models.aicusapicoh_qb_fv37cvt_qs_index_ids import AicusapicohQbFv37cvtQSIndexIds +from openapi_client.models.aicusapicoi_x_uam8_n8_dh8l import AicusapicoiXUam8N8Dh8l +from openapi_client.models.aicusapicoi_x_uam8_n8_dh8l_items_inner import AicusapicoiXUam8N8Dh8lItemsInner +from openapi_client.models.aicusapicoqew7t5v_ta2ak import Aicusapicoqew7t5vTA2ak +from openapi_client.models.aicusapicor1_kt5_c2m_lnkm import Aicusapicor1Kt5C2mLnkm +from 
openapi_client.models.aicusapicose_oar_xmrp_sns import AicusapicoseOArXMRpSNs diff --git a/api_test/biz_logic/rest_api/openapi_client/models/intellapicor_vojkt5w_iz_uc.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico2_twv_xbhs_tncy.py similarity index 72% rename from api_test/biz_logic/rest_api/openapi_client/models/intellapicor_vojkt5w_iz_uc.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapico2_twv_xbhs_tncy.py index 45b88a7a5..8e2e81c90 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/intellapicor_vojkt5w_iz_uc.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico2_twv_xbhs_tncy.py @@ -1,11 +1,11 @@ # coding: utf-8 """ - Intelli-Agent-RESTful-API + aics-api - Intelli-Agent RESTful API + AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-07-29T06:23:46Z + The version of the OpenAPI document: 2024-10-21T08:32:58Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
@@ -19,17 +19,17 @@ from pydantic import BaseModel, ConfigDict, Field, StrictInt from typing import Any, ClassVar, Dict, List, Optional -from openapi_client.models.intellapicor_vojkt5w_iz_uc_config import IntellapicorVOJKT5wIzUCConfig -from openapi_client.models.intellapicor_vojkt5w_iz_uc_items_inner import IntellapicorVOJKT5wIzUCItemsInner +from openapi_client.models.aicusapico2_twv_xbhs_tncy_config import Aicusapico2TwvXbhsTncyConfig +from openapi_client.models.aicusapico2_twv_xbhs_tncy_items_inner import Aicusapico2TwvXbhsTncyItemsInner from typing import Optional, Set from typing_extensions import Self -class IntellapicorVOJKT5wIzUC(BaseModel): +class Aicusapico2TwvXbhsTncy(BaseModel): """ - IntellapicorVOJKT5wIzUC + Aicusapico2TwvXbhsTncy """ # noqa: E501 - config: Optional[IntellapicorVOJKT5wIzUCConfig] = Field(default=None, alias="Config") - items: Optional[List[IntellapicorVOJKT5wIzUCItemsInner]] = Field(default=None, alias="Items") + config: Optional[Aicusapico2TwvXbhsTncyConfig] = Field(default=None, alias="Config") + items: Optional[List[Aicusapico2TwvXbhsTncyItemsInner]] = Field(default=None, alias="Items") count: Optional[StrictInt] = Field(default=None, alias="Count") __properties: ClassVar[List[str]] = ["Config", "Items", "Count"] @@ -51,7 +51,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of IntellapicorVOJKT5wIzUC from a JSON string""" + """Create an instance of Aicusapico2TwvXbhsTncy from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -86,7 +86,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of IntellapicorVOJKT5wIzUC from a dict""" + """Create an instance of Aicusapico2TwvXbhsTncy from a dict""" if obj is None: return None @@ -94,8 +94,8 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: return 
cls.model_validate(obj) _obj = cls.model_validate({ - "Config": IntellapicorVOJKT5wIzUCConfig.from_dict(obj["Config"]) if obj.get("Config") is not None else None, - "Items": [IntellapicorVOJKT5wIzUCItemsInner.from_dict(_item) for _item in obj["Items"]] if obj.get("Items") is not None else None, + "Config": Aicusapico2TwvXbhsTncyConfig.from_dict(obj["Config"]) if obj.get("Config") is not None else None, + "Items": [Aicusapico2TwvXbhsTncyItemsInner.from_dict(_item) for _item in obj["Items"]] if obj.get("Items") is not None else None, "Count": obj.get("Count") }) return _obj diff --git a/api_test/biz_logic/rest_api/openapi_client/models/intellapicor_vojkt5w_iz_uc_config.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico2_twv_xbhs_tncy_config.py similarity index 86% rename from api_test/biz_logic/rest_api/openapi_client/models/intellapicor_vojkt5w_iz_uc_config.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapico2_twv_xbhs_tncy_config.py index 83de084ef..aab1cae83 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/intellapicor_vojkt5w_iz_uc_config.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico2_twv_xbhs_tncy_config.py @@ -1,11 +1,11 @@ # coding: utf-8 """ - Intelli-Agent-RESTful-API + aics-api - Intelli-Agent RESTful API + AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-07-29T06:23:46Z + The version of the OpenAPI document: 2024-10-21T08:32:58Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
@@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class IntellapicorVOJKT5wIzUCConfig(BaseModel): +class Aicusapico2TwvXbhsTncyConfig(BaseModel): """ - IntellapicorVOJKT5wIzUCConfig + Aicusapico2TwvXbhsTncyConfig """ # noqa: E501 page_size: Optional[StrictInt] = Field(default=None, alias="PageSize") max_items: Optional[StrictInt] = Field(default=None, alias="MaxItems") @@ -48,7 +48,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of IntellapicorVOJKT5wIzUCConfig from a JSON string""" + """Create an instance of Aicusapico2TwvXbhsTncyConfig from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -73,7 +73,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of IntellapicorVOJKT5wIzUCConfig from a dict""" + """Create an instance of Aicusapico2TwvXbhsTncyConfig from a dict""" if obj is None: return None diff --git a/api_test/biz_logic/rest_api/openapi_client/models/intellapicor_vojkt5w_iz_uc_items_inner.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico2_twv_xbhs_tncy_items_inner.py similarity index 90% rename from api_test/biz_logic/rest_api/openapi_client/models/intellapicor_vojkt5w_iz_uc_items_inner.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapico2_twv_xbhs_tncy_items_inner.py index 183847a6e..53a4fb6d3 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/intellapicor_vojkt5w_iz_uc_items_inner.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico2_twv_xbhs_tncy_items_inner.py @@ -1,11 +1,11 @@ # coding: utf-8 """ - Intelli-Agent-RESTful-API + aics-api - Intelli-Agent RESTful API + AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-07-29T06:23:46Z + The version of the OpenAPI document: 2024-10-21T08:32:58Z Generated by OpenAPI 
Generator (https://openapi-generator.tech) Do not edit the class manually. @@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class IntellapicorVOJKT5wIzUCItemsInner(BaseModel): +class Aicusapico2TwvXbhsTncyItemsInner(BaseModel): """ - IntellapicorVOJKT5wIzUCItemsInner + Aicusapico2TwvXbhsTncyItemsInner """ # noqa: E501 execution_status: StrictStr = Field(alias="executionStatus") s3_prefix: StrictStr = Field(alias="s3Prefix") @@ -61,7 +61,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of IntellapicorVOJKT5wIzUCItemsInner from a JSON string""" + """Create an instance of Aicusapico2TwvXbhsTncyItemsInner from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -86,7 +86,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of IntellapicorVOJKT5wIzUCItemsInner from a dict""" + """Create an instance of Aicusapico2TwvXbhsTncyItemsInner from a dict""" if obj is None: return None diff --git a/api_test/biz_logic/rest_api/openapi_client/models/intellapicow_xa_faewe_tg_pt.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico35klz_y80ik_ph.py similarity index 78% rename from api_test/biz_logic/rest_api/openapi_client/models/intellapicow_xa_faewe_tg_pt.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapico35klz_y80ik_ph.py index a9ef6c63f..3a1dc74c7 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/intellapicow_xa_faewe_tg_pt.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico35klz_y80ik_ph.py @@ -1,11 +1,11 @@ # coding: utf-8 """ - Intelli-Agent-RESTful-API + aics-api - Intelli-Agent RESTful API + AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-07-29T06:23:46Z + The version of the OpenAPI document: 2024-10-21T08:32:58Z Generated by OpenAPI 
Generator (https://openapi-generator.tech) Do not edit the class manually. @@ -19,15 +19,15 @@ from pydantic import BaseModel, ConfigDict, Field, StrictInt from typing import Any, ClassVar, Dict, List, Optional -from openapi_client.models.intellapicow_xa_faewe_tg_pt_items_inner import IntellapicowXaFAEWeTgPtItemsInner +from openapi_client.models.aicusapico35klz_y80ik_ph_items_inner import Aicusapico35klzY80ikPhItemsInner from typing import Optional, Set from typing_extensions import Self -class IntellapicowXaFAEWeTgPt(BaseModel): +class Aicusapico35klzY80ikPh(BaseModel): """ - IntellapicowXaFAEWeTgPt + Aicusapico35klzY80ikPh """ # noqa: E501 - items: Optional[List[IntellapicowXaFAEWeTgPtItemsInner]] = Field(default=None, alias="Items") + items: Optional[List[Aicusapico35klzY80ikPhItemsInner]] = Field(default=None, alias="Items") count: Optional[StrictInt] = Field(default=None, alias="Count") __properties: ClassVar[List[str]] = ["Items", "Count"] @@ -49,7 +49,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of IntellapicowXaFAEWeTgPt from a JSON string""" + """Create an instance of Aicusapico35klzY80ikPh from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -81,7 +81,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of IntellapicowXaFAEWeTgPt from a dict""" + """Create an instance of Aicusapico35klzY80ikPh from a dict""" if obj is None: return None @@ -89,7 +89,7 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: return cls.model_validate(obj) _obj = cls.model_validate({ - "Items": [IntellapicowXaFAEWeTgPtItemsInner.from_dict(_item) for _item in obj["Items"]] if obj.get("Items") is not None else None, + "Items": [Aicusapico35klzY80ikPhItemsInner.from_dict(_item) for _item in obj["Items"]] if obj.get("Items") is not None else 
None, "Count": obj.get("Count") }) return _obj diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico35klz_y80ik_ph_items_inner.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico35klz_y80ik_ph_items_inner.py new file mode 100644 index 000000000..c7d67e18a --- /dev/null +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico35klz_y80ik_ph_items_inner.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + aics-api + + AI-Customer-Service - Core API + + The version of the OpenAPI document: 2024-10-21T08:32:58Z + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from openapi_client.models.aicusapico35klz_y80ik_ph_items_inner_qa_list_inner import Aicusapico35klzY80ikPhItemsInnerQAListInner +from typing import Optional, Set +from typing_extensions import Self + +class Aicusapico35klzY80ikPhItemsInner(BaseModel): + """ + Aicusapico35klzY80ikPhItemsInner + """ # noqa: E501 + s3_path: StrictStr = Field(alias="s3Path") + create_time: StrictStr = Field(alias="createTime") + s3_prefix: StrictStr = Field(alias="s3Prefix") + qa_list: Optional[List[Aicusapico35klzY80ikPhItemsInnerQAListInner]] = Field(default=None, alias="QAList") + status: StrictStr + __properties: ClassVar[List[str]] = ["s3Path", "createTime", "s3Prefix", "QAList", "status"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, 
exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Aicusapico35klzY80ikPhItemsInner from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in qa_list (list) + _items = [] + if self.qa_list: + for _item in self.qa_list: + if _item: + _items.append(_item.to_dict()) + _dict['QAList'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Aicusapico35klzY80ikPhItemsInner from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "s3Path": obj.get("s3Path"), + "createTime": obj.get("createTime"), + "s3Prefix": obj.get("s3Prefix"), + "QAList": [Aicusapico35klzY80ikPhItemsInnerQAListInner.from_dict(_item) for _item in obj["QAList"]] if obj.get("QAList") is not None else None, + "status": obj.get("status") + }) + return _obj + + diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico35klz_y80ik_ph_items_inner_qa_list_inner.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico35klz_y80ik_ph_items_inner_qa_list_inner.py new file mode 100644 index 000000000..0da9bf782 --- /dev/null +++ 
b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico35klz_y80ik_ph_items_inner_qa_list_inner.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + aics-api + + AI-Customer-Service - Core API + + The version of the OpenAPI document: 2024-10-21T08:32:58Z + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Aicusapico35klzY80ikPhItemsInnerQAListInner(BaseModel): + """ + Aicusapico35klzY80ikPhItemsInnerQAListInner + """ # noqa: E501 + question: Optional[StrictStr] = None + kwargs: Optional[StrictStr] = None + intention: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["question", "kwargs", "intention"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Aicusapico35klzY80ikPhItemsInnerQAListInner from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Aicusapico35klzY80ikPhItemsInnerQAListInner from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "question": obj.get("question"), + "kwargs": obj.get("kwargs"), + "intention": obj.get("intention") + }) + return _obj + + diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico4rw_mspze_boe5.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico4rw_mspze_boe5.py new file mode 100644 index 000000000..375b8c5b8 --- /dev/null +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico4rw_mspze_boe5.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + aics-api + + AI-Customer-Service - Core API + + The version of the OpenAPI document: 2024-10-21T08:32:58Z + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from openapi_client.models.aicusapico4rw_mspze_boe5_input_payload import Aicusapico4rwMspzeBOe5InputPayload +from typing import Optional, Set +from typing_extensions import Self + +class Aicusapico4rwMspzeBOe5(BaseModel): + """ + Aicusapico4rwMspzeBOe5 + """ # noqa: E501 + result: Optional[StrictStr] = None + execution_id: Optional[StrictStr] = None + input_payload: Optional[Aicusapico4rwMspzeBOe5InputPayload] = None + __properties: ClassVar[List[str]] = ["result", "execution_id", "input_payload"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Aicusapico4rwMspzeBOe5 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of input_payload + if self.input_payload: + _dict['input_payload'] = self.input_payload.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Aicusapico4rwMspzeBOe5 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "result": obj.get("result"), + "execution_id": obj.get("execution_id"), + "input_payload": Aicusapico4rwMspzeBOe5InputPayload.from_dict(obj["input_payload"]) if obj.get("input_payload") is not None else None + }) + return _obj + + diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico4rw_mspze_boe5_input_payload.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico4rw_mspze_boe5_input_payload.py new file mode 100644 index 000000000..960dbfddd --- /dev/null +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico4rw_mspze_boe5_input_payload.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + aics-api + + AI-Customer-Service - Core API + + The version of the OpenAPI document: 2024-10-21T08:32:58Z + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Aicusapico4rwMspzeBOe5InputPayload(BaseModel): + """ + Aicusapico4rwMspzeBOe5InputPayload + """ # noqa: E501 + chatbot_id: Optional[StrictStr] = Field(default=None, alias="chatbotId") + group_name: Optional[StrictStr] = Field(default=None, alias="groupName") + table_item_id: Optional[StrictStr] = Field(default=None, alias="tableItemId") + field_name: Optional[StrictStr] = Field(default=None, alias="fieldName") + index: Optional[StrictStr] = None + model: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["chatbotId", "groupName", "tableItemId", "fieldName", "index", "model"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Aicusapico4rwMspzeBOe5InputPayload from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Aicusapico4rwMspzeBOe5InputPayload from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "chatbotId": obj.get("chatbotId"), + "groupName": obj.get("groupName"), + "tableItemId": obj.get("tableItemId"), + "fieldName": obj.get("fieldName"), + "index": obj.get("index"), + "model": obj.get("model") + }) + return _obj + + diff --git a/api_test/biz_logic/rest_api/openapi_client/models/intellapicormo5_lbzxs9_rb.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico51_raf_cay_oxi_z.py similarity index 86% rename from api_test/biz_logic/rest_api/openapi_client/models/intellapicormo5_lbzxs9_rb.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapico51_raf_cay_oxi_z.py index e8783b0f4..775605668 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/intellapicormo5_lbzxs9_rb.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico51_raf_cay_oxi_z.py @@ -1,11 +1,11 @@ # coding: utf-8 """ - Intelli-Agent-RESTful-API + aics-api - Intelli-Agent RESTful API + AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-07-29T06:23:46Z + The version of the OpenAPI document: 2024-10-21T08:32:58Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
@@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class Intellapicormo5LBZXS9Rb(BaseModel): +class Aicusapico51RafCAYOxiZ(BaseModel): """ - Intellapicormo5LBZXS9Rb + Aicusapico51RafCAYOxiZ """ # noqa: E501 content_type: StrictStr file_name: StrictStr @@ -48,7 +48,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of Intellapicormo5LBZXS9Rb from a JSON string""" + """Create an instance of Aicusapico51RafCAYOxiZ from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -73,7 +73,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of Intellapicormo5LBZXS9Rb from a dict""" + """Create an instance of Aicusapico51RafCAYOxiZ from a dict""" if obj is None: return None diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_cyd129_m65y_kv.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_cyd129_m65y_kv.py new file mode 100644 index 000000000..8acdc2c49 --- /dev/null +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_cyd129_m65y_kv.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + aics-api + + AI-Customer-Service - Core API + + The version of the OpenAPI document: 2024-10-21T08:32:58Z + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class AicusapicoCyd129M65yKV(BaseModel): + """ + AicusapicoCyd129M65yKV + """ # noqa: E501 + content_type: StrictStr + file_name: StrictStr + __properties: ClassVar[List[str]] = ["content_type", "file_name"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AicusapicoCyd129M65yKV from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AicusapicoCyd129M65yKV from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "content_type": obj.get("content_type"), + "file_name": obj.get("file_name") + }) + return _obj + + diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_eoc_l_nul8cwxa.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_eoc_l_nul8cwxa.py new file mode 100644 index 000000000..7dc1f75c0 --- /dev/null +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_eoc_l_nul8cwxa.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + aics-api + + AI-Customer-Service - Core API + + The version of the OpenAPI document: 2024-10-21T08:32:58Z + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class AicusapicoEOcLNul8cwxa(BaseModel): + """ + AicusapicoEOcLNul8cwxa + """ # noqa: E501 + chatbot_id: StrictStr = Field(alias="chatbotId") + index: StrictStr + model: StrictStr + s3_prefix: StrictStr = Field(alias="s3Prefix") + s3_bucket: StrictStr = Field(alias="s3Bucket") + __properties: ClassVar[List[str]] = ["chatbotId", "index", "model", "s3Prefix", "s3Bucket"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AicusapicoEOcLNul8cwxa from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AicusapicoEOcLNul8cwxa from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "chatbotId": obj.get("chatbotId"), + "index": obj.get("index"), + "model": obj.get("model"), + "s3Prefix": obj.get("s3Prefix"), + "s3Bucket": obj.get("s3Bucket") + }) + return _obj + + diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_qjco_kzz_zfi86.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_qjco_kzz_zfi86.py new file mode 100644 index 000000000..d3933ef75 --- /dev/null +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_qjco_kzz_zfi86.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + aics-api + + AI-Customer-Service - Core API + + The version of the OpenAPI document: 2024-10-21T08:32:58Z + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from openapi_client.models.aicusapico_qjco_kzz_zfi86_items_inner import AicusapicoQjcoKzzZFI86ItemsInner +from typing import Optional, Set +from typing_extensions import Self + +class AicusapicoQjcoKzzZFI86(BaseModel): + """ + AicusapicoQjcoKzzZFI86 + """ # noqa: E501 + items: Optional[List[AicusapicoQjcoKzzZFI86ItemsInner]] = Field(default=None, alias="Items") + count: Optional[StrictInt] = Field(default=None, alias="Count") + __properties: ClassVar[List[str]] = ["Items", "Count"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AicusapicoQjcoKzzZFI86 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in items (list) + _items = [] + if self.items: + for _item in self.items: + if _item: + _items.append(_item.to_dict()) + _dict['Items'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AicusapicoQjcoKzzZFI86 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "Items": [AicusapicoQjcoKzzZFI86ItemsInner.from_dict(_item) for _item in obj["Items"]] if obj.get("Items") is not None else None, + "Count": obj.get("Count") + }) + return _obj + + diff --git a/api_test/biz_logic/rest_api/openapi_client/models/intellapicow_xa_faewe_tg_pt_items_inner.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_qjco_kzz_zfi86_items_inner.py similarity index 87% rename from api_test/biz_logic/rest_api/openapi_client/models/intellapicow_xa_faewe_tg_pt_items_inner.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapico_qjco_kzz_zfi86_items_inner.py index 3d75c4a90..b531669c3 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/intellapicow_xa_faewe_tg_pt_items_inner.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_qjco_kzz_zfi86_items_inner.py @@ -1,11 +1,11 @@ # coding: utf-8 """ - Intelli-Agent-RESTful-API + aics-api - Intelli-Agent RESTful API + AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-07-29T06:23:46Z + The version of the OpenAPI document: 2024-10-21T08:32:58Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
@@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class IntellapicowXaFAEWeTgPtItemsInner(BaseModel): +class AicusapicoQjcoKzzZFI86ItemsInner(BaseModel): """ - IntellapicowXaFAEWeTgPtItemsInner + AicusapicoQjcoKzzZFI86ItemsInner """ # noqa: E501 execution_id: StrictStr = Field(alias="executionId") s3_path: StrictStr = Field(alias="s3Path") @@ -52,7 +52,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of IntellapicowXaFAEWeTgPtItemsInner from a JSON string""" + """Create an instance of AicusapicoQjcoKzzZFI86ItemsInner from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -77,7 +77,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of IntellapicowXaFAEWeTgPtItemsInner from a dict""" + """Create an instance of AicusapicoQjcoKzzZFI86ItemsInner from a dict""" if obj is None: return None diff --git a/api_test/biz_logic/rest_api/openapi_client/models/intellapico_xe_xa_u_mja_xt_px.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_uy1_ybxi_wj5_aq.py similarity index 87% rename from api_test/biz_logic/rest_api/openapi_client/models/intellapico_xe_xa_u_mja_xt_px.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapico_uy1_ybxi_wj5_aq.py index be57427de..64952c6b2 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/intellapico_xe_xa_u_mja_xt_px.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_uy1_ybxi_wj5_aq.py @@ -1,11 +1,11 @@ # coding: utf-8 """ - Intelli-Agent-RESTful-API + aics-api - Intelli-Agent RESTful API + AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-07-29T06:23:46Z + The version of the OpenAPI document: 2024-10-21T08:32:58Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
@@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class IntellapicoXeXaUMjaXtPx(BaseModel): +class AicusapicoUy1YBXiWJ5Aq(BaseModel): """ - IntellapicoXeXaUMjaXtPx + AicusapicoUy1YBXiWJ5Aq """ # noqa: E501 data: Optional[StrictStr] = None message: Optional[StrictStr] = None @@ -50,7 +50,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of IntellapicoXeXaUMjaXtPx from a JSON string""" + """Create an instance of AicusapicoUy1YBXiWJ5Aq from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -75,7 +75,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of IntellapicoXeXaUMjaXtPx from a dict""" + """Create an instance of AicusapicoUy1YBXiWJ5Aq from a dict""" if obj is None: return None diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicob9jx_gq8zv1_as.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicob9jx_gq8zv1_as.py new file mode 100644 index 000000000..d8902eacd --- /dev/null +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicob9jx_gq8zv1_as.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + aics-api + + AI-Customer-Service - Core API + + The version of the OpenAPI document: 2024-10-21T08:32:58Z + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from openapi_client.models.aicusapico2_twv_xbhs_tncy_config import Aicusapico2TwvXbhsTncyConfig +from openapi_client.models.aicusapicob9jx_gq8zv1_as_items_inner import Aicusapicob9jxGQ8zv1ASItemsInner +from typing import Optional, Set +from typing_extensions import Self + +class Aicusapicob9jxGQ8zv1AS(BaseModel): + """ + Aicusapicob9jxGQ8zv1AS + """ # noqa: E501 + config: Optional[Aicusapico2TwvXbhsTncyConfig] = Field(default=None, alias="Config") + items: Optional[List[Aicusapicob9jxGQ8zv1ASItemsInner]] = Field(default=None, alias="Items") + count: Optional[StrictInt] = Field(default=None, alias="Count") + __properties: ClassVar[List[str]] = ["Config", "Items", "Count"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Aicusapicob9jxGQ8zv1AS from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['Config'] = self.config.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in items (list) + _items = [] + if self.items: + for _item in self.items: + if _item: + _items.append(_item.to_dict()) + _dict['Items'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Aicusapicob9jxGQ8zv1AS from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "Config": Aicusapico2TwvXbhsTncyConfig.from_dict(obj["Config"]) if obj.get("Config") is not None else None, + "Items": [Aicusapicob9jxGQ8zv1ASItemsInner.from_dict(_item) for _item in obj["Items"]] if obj.get("Items") is not None else None, + "Count": obj.get("Count") + }) + return _obj + + diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicob9jx_gq8zv1_as_items_inner.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicob9jx_gq8zv1_as_items_inner.py new file mode 100644 index 000000000..a902314a3 --- /dev/null +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicob9jx_gq8zv1_as_items_inner.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + aics-api + + AI-Customer-Service - Core API + + The version of the OpenAPI document: 2024-10-21T08:32:58Z + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Aicusapicob9jxGQ8zv1ASItemsInner(BaseModel): + """ + Aicusapicob9jxGQ8zv1ASItemsInner + """ # noqa: E501 + execution_id: StrictStr = Field(alias="executionId") + file_name: StrictStr = Field(alias="fileName") + create_by: StrictStr = Field(alias="createBy") + chatbot_id: StrictStr = Field(alias="chatbotId") + create_time: StrictStr = Field(alias="createTime") + execution_status: StrictStr = Field(alias="executionStatus") + index: StrictStr + model: StrictStr + details: StrictStr + tag: StrictStr + __properties: ClassVar[List[str]] = ["executionId", "fileName", "createBy", "chatbotId", "createTime", "executionStatus", "index", "model", "details", "tag"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Aicusapicob9jxGQ8zv1ASItemsInner from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Aicusapicob9jxGQ8zv1ASItemsInner from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "executionId": obj.get("executionId"), + "fileName": obj.get("fileName"), + "createBy": obj.get("createBy"), + "chatbotId": obj.get("chatbotId"), + "createTime": obj.get("createTime"), + "executionStatus": obj.get("executionStatus"), + "index": obj.get("index"), + "model": obj.get("model"), + "details": obj.get("details"), + "tag": obj.get("tag") + }) + return _obj + + diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicob_mn2p_lk9_av_e8.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicob_mn2p_lk9_av_e8.py new file mode 100644 index 000000000..f6c7ab336 --- /dev/null +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicob_mn2p_lk9_av_e8.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + aics-api + + AI-Customer-Service - Core API + + The version of the OpenAPI document: 2024-10-21T08:32:58Z + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from openapi_client.models.aicusapicob_mn2p_lk9_av_e8_index import AicusapicobMN2pLK9AvE8Index +from typing import Optional, Set +from typing_extensions import Self + +class AicusapicobMN2pLK9AvE8(BaseModel): + """ + AicusapicobMN2pLK9AvE8 + """ # noqa: E501 + model_name: StrictStr = Field(alias="modelName") + chatbot_id: StrictStr = Field(alias="chatbotId") + model_id: StrictStr = Field(alias="modelId") + index: AicusapicobMN2pLK9AvE8Index + __properties: ClassVar[List[str]] = ["modelName", "chatbotId", "modelId", "index"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AicusapicobMN2pLK9AvE8 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of index + if self.index: + _dict['index'] = self.index.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AicusapicobMN2pLK9AvE8 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "modelName": obj.get("modelName"), + "chatbotId": obj.get("chatbotId"), + "modelId": obj.get("modelId"), + "index": AicusapicobMN2pLK9AvE8Index.from_dict(obj["index"]) if obj.get("index") is not None else None + }) + return _obj + + diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicob_mn2p_lk9_av_e8_index.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicob_mn2p_lk9_av_e8_index.py new file mode 100644 index 000000000..a9d352cb1 --- /dev/null +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicob_mn2p_lk9_av_e8_index.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + aics-api + + AI-Customer-Service - Core API + + The version of the OpenAPI document: 2024-10-21T08:32:58Z + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class AicusapicobMN2pLK9AvE8Index(BaseModel): + """ + AicusapicobMN2pLK9AvE8Index + """ # noqa: E501 + qq: StrictStr + qd: StrictStr + intention: StrictStr + __properties: ClassVar[List[str]] = ["qq", "qd", "intention"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AicusapicobMN2pLK9AvE8Index from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AicusapicobMN2pLK9AvE8Index from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "qq": obj.get("qq"), + "qd": obj.get("qd"), + "intention": obj.get("intention") + }) + return _obj + + diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoh5w3_f_rwx_bjh_g.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoh5w3_f_rwx_bjh_g.py new file mode 100644 index 000000000..ba6a208d9 --- /dev/null +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoh5w3_f_rwx_bjh_g.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + aics-api + + AI-Customer-Service - Core API + + The version of the OpenAPI document: 2024-10-21T08:32:58Z + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Aicusapicoh5w3FRwxBjhG(BaseModel): + """ + Aicusapicoh5w3FRwxBjhG + """ # noqa: E501 + data: Optional[StrictStr] = None + message: Optional[StrictStr] = None + s3_prefix: Optional[StrictStr] = Field(default=None, alias="s3Prefix") + s3_bucket: Optional[StrictStr] = Field(default=None, alias="s3Bucket") + __properties: ClassVar[List[str]] = ["data", "message", "s3Prefix", "s3Bucket"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Aicusapicoh5w3FRwxBjhG from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Aicusapicoh5w3FRwxBjhG from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "data": obj.get("data"), + "message": obj.get("message"), + "s3Prefix": obj.get("s3Prefix"), + "s3Bucket": obj.get("s3Bucket") + }) + return _obj + + diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoh_qb_fv37cvt_qs.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoh_qb_fv37cvt_qs.py new file mode 100644 index 000000000..18191678c --- /dev/null +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoh_qb_fv37cvt_qs.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + aics-api + + AI-Customer-Service - Core API + + The version of the OpenAPI document: 2024-10-21T08:32:58Z + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from openapi_client.models.aicusapicoh_qb_fv37cvt_qs_index_ids import AicusapicohQbFv37cvtQSIndexIds +from typing import Optional, Set +from typing_extensions import Self + +class AicusapicohQbFv37cvtQS(BaseModel): + """ + AicusapicohQbFv37cvtQS + """ # noqa: E501 + chatbot_id: Optional[StrictStr] = Field(default=None, alias="chatbotId") + group_name: Optional[StrictStr] = Field(default=None, alias="groupName") + index_ids: Optional[AicusapicohQbFv37cvtQSIndexIds] = Field(default=None, alias="indexIds") + message: Optional[StrictStr] = Field(default=None, alias="Message") + __properties: ClassVar[List[str]] = ["chatbotId", "groupName", "indexIds", "Message"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AicusapicohQbFv37cvtQS from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of index_ids + if self.index_ids: + _dict['indexIds'] = self.index_ids.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AicusapicohQbFv37cvtQS from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "chatbotId": obj.get("chatbotId"), + "groupName": obj.get("groupName"), + "indexIds": AicusapicohQbFv37cvtQSIndexIds.from_dict(obj["indexIds"]) if obj.get("indexIds") is not None else None, + "Message": obj.get("Message") + }) + return _obj + + diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoh_qb_fv37cvt_qs_index_ids.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoh_qb_fv37cvt_qs_index_ids.py new file mode 100644 index 000000000..0c82f4f98 --- /dev/null +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoh_qb_fv37cvt_qs_index_ids.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + aics-api + + AI-Customer-Service - Core API + + The version of the OpenAPI document: 2024-10-21T08:32:58Z + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class AicusapicohQbFv37cvtQSIndexIds(BaseModel): + """ + AicusapicohQbFv37cvtQSIndexIds + """ # noqa: E501 + qq: Optional[StrictStr] = None + qd: Optional[StrictStr] = None + intention: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["qq", "qd", "intention"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AicusapicohQbFv37cvtQSIndexIds from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AicusapicohQbFv37cvtQSIndexIds from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "qq": obj.get("qq"), + "qd": obj.get("qd"), + "intention": obj.get("intention") + }) + return _obj + + diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoi_x_uam8_n8_dh8l.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoi_x_uam8_n8_dh8l.py new file mode 100644 index 000000000..0c15c437c --- /dev/null +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoi_x_uam8_n8_dh8l.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + aics-api + + AI-Customer-Service - Core API + + The version of the OpenAPI document: 2024-10-21T08:32:58Z + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from openapi_client.models.aicusapico2_twv_xbhs_tncy_config import Aicusapico2TwvXbhsTncyConfig +from openapi_client.models.aicusapicoi_x_uam8_n8_dh8l_items_inner import AicusapicoiXUam8N8Dh8lItemsInner +from typing import Optional, Set +from typing_extensions import Self + +class AicusapicoiXUam8N8Dh8l(BaseModel): + """ + AicusapicoiXUam8N8Dh8l + """ # noqa: E501 + chatbot_ids: Optional[List[StrictStr]] = None + config: Optional[Aicusapico2TwvXbhsTncyConfig] = Field(default=None, alias="Config") + items: Optional[List[AicusapicoiXUam8N8Dh8lItemsInner]] = Field(default=None, alias="Items") + count: Optional[StrictInt] = Field(default=None, alias="Count") + __properties: ClassVar[List[str]] = ["chatbot_ids", "Config", "Items", "Count"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AicusapicoiXUam8N8Dh8l from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['Config'] = self.config.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in items (list) + _items = [] + if self.items: + for _item in self.items: + if _item: + _items.append(_item.to_dict()) + _dict['Items'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AicusapicoiXUam8N8Dh8l from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "chatbot_ids": obj.get("chatbot_ids"), + "Config": Aicusapico2TwvXbhsTncyConfig.from_dict(obj["Config"]) if obj.get("Config") is not None else None, + "Items": [AicusapicoiXUam8N8Dh8lItemsInner.from_dict(_item) for _item in obj["Items"]] if obj.get("Items") is not None else None, + "Count": obj.get("Count") + }) + return _obj + + diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoi_x_uam8_n8_dh8l_items_inner.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoi_x_uam8_n8_dh8l_items_inner.py new file mode 100644 index 000000000..28529c908 --- /dev/null +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoi_x_uam8_n8_dh8l_items_inner.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + aics-api + + AI-Customer-Service - Core API + + The version of the OpenAPI document: 2024-10-21T08:32:58Z + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class AicusapicoiXUam8N8Dh8lItemsInner(BaseModel): + """ + AicusapicoiXUam8N8Dh8lItemsInner + """ # noqa: E501 + chatbot_id: StrictStr = Field(alias="ChatbotId") + model_name: StrictStr = Field(alias="ModelName") + last_modified_time: StrictStr = Field(alias="LastModifiedTime") + model_id: StrictStr = Field(alias="ModelId") + __properties: ClassVar[List[str]] = ["ChatbotId", "ModelName", "LastModifiedTime", "ModelId"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AicusapicoiXUam8N8Dh8lItemsInner from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AicusapicoiXUam8N8Dh8lItemsInner from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "ChatbotId": obj.get("ChatbotId"), + "ModelName": obj.get("ModelName"), + "LastModifiedTime": obj.get("LastModifiedTime"), + "ModelId": obj.get("ModelId") + }) + return _obj + + diff --git a/api_test/biz_logic/rest_api/openapi_client/models/intellapico_nb_a0ny_pxxk6q.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoqew7t5v_ta2ak.py similarity index 86% rename from api_test/biz_logic/rest_api/openapi_client/models/intellapico_nb_a0ny_pxxk6q.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapicoqew7t5v_ta2ak.py index 6ab1f7a59..6c6b58728 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/intellapico_nb_a0ny_pxxk6q.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoqew7t5v_ta2ak.py @@ -1,11 +1,11 @@ # coding: utf-8 """ - Intelli-Agent-RESTful-API + aics-api - Intelli-Agent RESTful API + AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-07-29T06:23:46Z + The version of the OpenAPI document: 2024-10-21T08:32:58Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
@@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class IntellapicoNbA0nyPxxk6q(BaseModel): +class Aicusapicoqew7t5vTA2ak(BaseModel): """ - IntellapicoNbA0nyPxxk6q + Aicusapicoqew7t5vTA2ak """ # noqa: E501 data: Optional[List[StrictStr]] = None message: Optional[StrictStr] = None @@ -48,7 +48,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of IntellapicoNbA0nyPxxk6q from a JSON string""" + """Create an instance of Aicusapicoqew7t5vTA2ak from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -73,7 +73,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of IntellapicoNbA0nyPxxk6q from a dict""" + """Create an instance of Aicusapicoqew7t5vTA2ak from a dict""" if obj is None: return None diff --git a/api_test/biz_logic/rest_api/openapi_client/models/intellapico_h4_a9yvm8c1p3.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicor1_kt5_c2m_lnkm.py similarity index 86% rename from api_test/biz_logic/rest_api/openapi_client/models/intellapico_h4_a9yvm8c1p3.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapicor1_kt5_c2m_lnkm.py index e358fa00f..505f3ef51 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/intellapico_h4_a9yvm8c1p3.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicor1_kt5_c2m_lnkm.py @@ -1,11 +1,11 @@ # coding: utf-8 """ - Intelli-Agent-RESTful-API + aics-api - Intelli-Agent RESTful API + AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-07-29T06:23:46Z + The version of the OpenAPI document: 2024-10-21T08:32:58Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
@@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class IntellapicoH4A9yvm8c1p3(BaseModel): +class Aicusapicor1Kt5C2mLnkm(BaseModel): """ - IntellapicoH4A9yvm8c1p3 + Aicusapicor1Kt5C2mLnkm """ # noqa: E501 execution_id: List[StrictStr] = Field(alias="executionId") __properties: ClassVar[List[str]] = ["executionId"] @@ -47,7 +47,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of IntellapicoH4A9yvm8c1p3 from a JSON string""" + """Create an instance of Aicusapicor1Kt5C2mLnkm from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -72,7 +72,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of IntellapicoH4A9yvm8c1p3 from a dict""" + """Create an instance of Aicusapicor1Kt5C2mLnkm from a dict""" if obj is None: return None diff --git a/api_test/biz_logic/rest_api/openapi_client/models/intellapico_nk9o_lf1_k1uex.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicose_oar_xmrp_sns.py similarity index 89% rename from api_test/biz_logic/rest_api/openapi_client/models/intellapico_nk9o_lf1_k1uex.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapicose_oar_xmrp_sns.py index 6e5403b62..e63f89f42 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/intellapico_nk9o_lf1_k1uex.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicose_oar_xmrp_sns.py @@ -1,11 +1,11 @@ # coding: utf-8 """ - Intelli-Agent-RESTful-API + aics-api - Intelli-Agent RESTful API + AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-07-29T06:23:46Z + The version of the OpenAPI document: 2024-10-21T08:32:58Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
@@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class IntellapicoNK9oLf1K1uex(BaseModel): +class AicusapicoseOArXMRpSNs(BaseModel): """ - IntellapicoNK9oLf1K1uex + AicusapicoseOArXMRpSNs """ # noqa: E501 offline: StrictStr chatbot_id: StrictStr = Field(alias="chatbotId") @@ -53,7 +53,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of IntellapicoNK9oLf1K1uex from a JSON string""" + """Create an instance of AicusapicoseOArXMRpSNs from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -78,7 +78,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of IntellapicoNK9oLf1K1uex from a dict""" + """Create an instance of AicusapicoseOArXMRpSNs from a dict""" if obj is None: return None diff --git a/api_test/biz_logic/rest_api/openapi_client/rest.py b/api_test/biz_logic/rest_api/openapi_client/rest.py index 9b943bfda..70ab0f033 100644 --- a/api_test/biz_logic/rest_api/openapi_client/rest.py +++ b/api_test/biz_logic/rest_api/openapi_client/rest.py @@ -1,11 +1,11 @@ # coding: utf-8 """ - Intelli-Agent-RESTful-API + aics-api - Intelli-Agent RESTful API + AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-07-29T06:23:46Z + The version of the OpenAPI document: 2024-10-21T08:32:58Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
diff --git a/api_test/biz_logic/rest_api/test-requirements.txt b/api_test/biz_logic/rest_api/test-requirements.txt deleted file mode 100644 index 8e6d8cb13..000000000 --- a/api_test/biz_logic/rest_api/test-requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -pytest~=7.1.3 -pytest-cov>=2.8.1 -pytest-randomly>=3.12.0 -mypy>=1.4.1 -types-python-dateutil>=2.8.19 diff --git a/api_test/buildspec-20241012.yaml b/api_test/buildspec-20241012.yaml index 12b80fd18..82a6497ff 100644 --- a/api_test/buildspec-20241012.yaml +++ b/api_test/buildspec-20241012.yaml @@ -3,7 +3,7 @@ version: 0.2 env: variables: REPOSITORY_URL: "https://github.com/aws-samples/Intelli-Agent.git" - CODE_BRANCH: "dev" + CODE_BRANCH: "main" PROJECT_NAME: "Intelli-Agent" STACK_NAME: "ai-customer-service" DEPLOY_STACK: "cdk" @@ -23,9 +23,9 @@ env: phases: install: commands: - - aws cloudformation delete-stack --stack-name "$STACK_NAME" + - aws cloudformation delete-stack --stack-name $STACK_NAME - | - aws cloudformation wait stack-delete-complete --stack-name "$STACK_NAME" + aws cloudformation wait stack-delete-complete --stack-name $STACK_NAME WAIT_STATUS=$? if [ $WAIT_STATUS -eq 0 ]; then echo "Stack deletion complete." @@ -41,8 +41,8 @@ phases: echo $lambda_status if [ $lambda_status -eq 200 ]; then echo "Lambda executed successfully. Proceeding with stack deletion." - aws cloudformation delete-stack --stack-name "$STACK_NAME" - aws cloudformation wait stack-delete-complete --stack-name "$STACK_NAME" + aws cloudformation delete-stack --stack-name $STACK_NAME + aws cloudformation wait stack-delete-complete --stack-name $STACK_NAME if [ $? -eq 0 ]; then echo "Stack deletion complete after Lambda execution." else @@ -72,9 +72,9 @@ phases: if [ -d "Intelli-Agent/source/infrastructure" ]; then echo "Synthesizing start..." 
pushd "Intelli-Agent/source/infrastructure" - sed -i 's/support@example.com/$SUB_EMAIL/g' bin/config.ts + sed -i "s/support@example.com/${SUB_EMAIL}/g" bin/config.ts pnpm i - npx cdk synth 2>&1 | tee synth.log + npx cdk synth > synth.log 2>&1 if [ ${PIPESTATUS[0]} -ne 0 ]; then echo "cdk synth failed" # jq -n --arg error "$(cat synth.log)" '{detail: $error}' > sync_error.json @@ -90,7 +90,7 @@ phases: if [ -d "Intelli-Agent/source/infrastructure" ]; then pushd "Intelli-Agent/source/infrastructure" pnpm i - npx cdk deploy $STACK_NAME + npx cdk deploy $STACK_NAME --require-approval never deploy_exit_code=$? if [ $deploy_exit_code -ne 0 ]; then echo "CDK deployment failed. Sending email and exiting with status code 1." @@ -130,11 +130,11 @@ phases: pip3 install --upgrade pip pip3 --default-timeout=6000 install -r requirements.txt popd - stack_info=$(aws cloudformation describe-stacks --stack-name "$STACK_NAME") + stack_info=$(aws cloudformation describe-stacks --stack-name $STACK_NAME) rest_api_gateway_url=$(echo "$stack_info" | jq -r '.Stacks[0].Outputs[] | select(.OutputKey=="APIEndpointAddress").OutputValue') ws_api_gateway_url=$(echo "$stack_info" | jq -r '.Stacks[0].Outputs[] | select(.OutputKey=="WebSocketEndpointAddress").OutputValue') - user_pool_id=$(echo "$stack_info" | jq -r '.Stacks[0].Outputs[] | select(.OutputKey=="UserPoolId").OutputValue') - oidc_client_id=$(echo "$stack_info" | jq -r '.Stacks[0].Outputs[] | select(.OutputKey=="OidcClientId").OutputValue') + user_pool_id=$(echo "$stack_info" | jq -r '.Stacks[0].Outputs[] | select(.OutputKey=="UserPoolID").OutputValue') + oidc_client_id=$(echo "$stack_info" | jq -r '.Stacks[0].Outputs[] | select(.OutputKey=="OIDCClientID").OutputValue') aws cognito-idp admin-create-user \ --user-pool-id $user_pool_id \ --username lvning@amazon.com \ diff --git a/api_test/sourceGen.sh b/api_test/sourceGen.sh new file mode 100755 index 000000000..22112ee9c --- /dev/null +++ b/api_test/sourceGen.sh @@ -0,0 +1,16 @@ 
+#!/bin/bash + +yes | rm -rf ./biz_logic/rest_api/* +mkdir generated-client +chmod a+w ./generated-client + +openapi-generator-cli generate -i Intelli-Agent-RESTful-API-prod-oas30.json -g python -o ./generated-client + +mv ./generated-client/docs ./biz_logic/rest_api/ +mv ./generated-client/openapi_client ./biz_logic/rest_api/ + +touch ./biz_logic/rest_api/__init__.py +sed -i '/__version__ = "1.0.0"/a\import sys\nimport os\nopenapi_client_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../biz_logic/rest_api"))\nsys.path.insert(0, openapi_client_path)\n' ./biz_logic/rest_api/openapi_client/__init__.py + + +rm -rf ./generated-client diff --git a/api_test/test_case/test_01_rest_document.py b/api_test/test_case/test_01_rest_document.py index 18312b2e8..ac5c4cae5 100644 --- a/api_test/test_case/test_01_rest_document.py +++ b/api_test/test_case/test_01_rest_document.py @@ -64,11 +64,11 @@ def teardown_method(self, method): def test_01_upload_document_pdf(self): '''test case''' - param = openapi_client.Intellapicormo5LBZXS9Rb(content_type='application/pdf', file_name="summary.pdf") + param = openapi_client.Aicusapico51RafCAYOxiZ(content_type='application/pdf', file_name="summary.pdf") response = self.api_instance.knowledge_base_kb_presigned_url_post(param) assert response.message==self.upload_success_msg and response.data.startswith(self.upload_prefix_data), "test_01_upload_document_pdf test failed" self.__upload_file_to_s3(response.data, "./test_data/summary.pdf") - post_param = openapi_client.IntellapicoNK9oLf1K1uex( + post_param = openapi_client.AicusapicoseOArXMRpSNs( s3Bucket=response.s3_bucket, s3Prefix=response.s3_prefix, indexType="qd", diff --git a/source/infrastructure/lib/api/api-stack.ts b/source/infrastructure/lib/api/api-stack.ts index 5e0131047..ff899850b 100644 --- a/source/infrastructure/lib/api/api-stack.ts +++ b/source/infrastructure/lib/api/api-stack.ts @@ -381,10 +381,13 @@ export class ApiConstruct extends Construct { new 
apigw.LambdaIntegration(uploadDocLambda.function), {... this.genMethodOption(api, auth, { - data: { type: JsonSchemaType.STRING }, - message: { type: JsonSchemaType.STRING }, - s3Bucket: { type: JsonSchemaType.STRING }, - s3Prefix: { type: JsonSchemaType.STRING } + data: { type: JsonSchemaType.OBJECT, + properties: { + s3Bucket: { type: JsonSchemaType.STRING }, + s3Prefix: { type: JsonSchemaType.STRING } + } + }, + message: { type: JsonSchemaType.STRING } }), requestModels: this.genRequestModel(api, { "content_type": { "type": JsonSchemaType.STRING }, From f515aac01741d39e7ca524c7c89ceb36e606e35c Mon Sep 17 00:00:00 2001 From: NingLyu Date: Wed, 23 Oct 2024 01:16:23 +0000 Subject: [PATCH 016/110] chore: update rest api schema --- source/lambda/online/lambda_main/main.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/source/lambda/online/lambda_main/main.py b/source/lambda/online/lambda_main/main.py index fad052f33..728ffe952 100644 --- a/source/lambda/online/lambda_main/main.py +++ b/source/lambda/online/lambda_main/main.py @@ -106,9 +106,10 @@ def aics_restapi_event_handler(event_body: dict, context: dict, entry_executor): request_timestamp = context["request_timestamp"] client_type = event_body.get("client_type", "default_client_type") session_id = event_body.get("session_id", f"session_{request_timestamp}") + para_chatbot_config = event_body.get("chatbot_config", {}) user_id = event_body.get("user_id", "default_user_id") - group_name = event_body.get("group_name", "Admin") - chatbot_id = event_body.get("chatbot_id", "admin") + group_name = para_chatbot_config.get("group_name", "Admin") + chatbot_id = para_chatbot_config.get("chatbot_id", "admin") ddb_history_obj = DynamoDBChatMessageHistory( sessions_table_name=sessions_table_name, From ce4ed91977d5ba690a9501575aa68027c2dd585e Mon Sep 17 00:00:00 2001 From: NingLyu Date: Wed, 23 Oct 2024 06:12:35 +0000 Subject: [PATCH 017/110] chore: update parameter --- 
source/lambda/online/lambda_main/main.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/source/lambda/online/lambda_main/main.py b/source/lambda/online/lambda_main/main.py index 728ffe952..2e4ae3f75 100644 --- a/source/lambda/online/lambda_main/main.py +++ b/source/lambda/online/lambda_main/main.py @@ -110,6 +110,7 @@ def aics_restapi_event_handler(event_body: dict, context: dict, entry_executor): user_id = event_body.get("user_id", "default_user_id") group_name = para_chatbot_config.get("group_name", "Admin") chatbot_id = para_chatbot_config.get("chatbot_id", "admin") + use_history = para_chatbot_config.get("use_history", "true").lower() == "true" ddb_history_obj = DynamoDBChatMessageHistory( sessions_table_name=sessions_table_name, @@ -128,7 +129,7 @@ def aics_restapi_event_handler(event_body: dict, context: dict, entry_executor): "user_id": user_id, "chatbot_config": { "chatbot_mode": "agent", - "use_history": True, + "use_history": use_history, }, "stream": False, } From 4865bb2add78110837bb17c82936b4b28b447b41 Mon Sep 17 00:00:00 2001 From: Xu Han Date: Wed, 23 Oct 2024 16:35:07 +0000 Subject: [PATCH 018/110] fix: fix front end create chatbot model selection --- source/infrastructure/cli/magic-config.ts | 7 +++---- .../pages/chatbotManagement/ChatbotManagement.tsx | 12 +++++++++--- 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/source/infrastructure/cli/magic-config.ts b/source/infrastructure/cli/magic-config.ts index 6a14bfd7c..9acb7d2df 100644 --- a/source/infrastructure/cli/magic-config.ts +++ b/source/infrastructure/cli/magic-config.ts @@ -113,9 +113,8 @@ async function getAwsAccountAndRegion() { options.enableChat = config.chat.enabled; options.bedrockRegion = config.chat.bedrockRegion; options.enableConnect = config.chat.amazonConnect.enabled; - options.defaultEmbedding = (config.model.embeddingsModels ?? []).filter( - (m: any) => m.default - )[0].name; + options.defaultEmbedding = (config.model.embeddingsModels ?? 
[]) + .find((m: any) => m.default)?.name ?? embeddingModels[0].name; options.defaultLlm = config.model.llms.find((m) => m.provider === "bedrock")?.name; options.sagemakerModelS3Bucket = config.model.modelConfig.modelAssetsBucket; options.enableUI = config.ui.enabled; @@ -516,4 +515,4 @@ async function processCreateOptions(options: any): Promise { ).create ? createConfig(config) : console.log("Skipping"); -} \ No newline at end of file +} diff --git a/source/portal/src/pages/chatbotManagement/ChatbotManagement.tsx b/source/portal/src/pages/chatbotManagement/ChatbotManagement.tsx index bf0311c36..445725598 100644 --- a/source/portal/src/pages/chatbotManagement/ChatbotManagement.tsx +++ b/source/portal/src/pages/chatbotManagement/ChatbotManagement.tsx @@ -73,10 +73,16 @@ const ChatbotManagement: React.FC = () => { const getModelList = async (type: 'create' | 'edit') => { const tempModels:{label: string; value:string}[] =[] - const BCEMBEDDING = [ - {"model_id": config?.embeddingEndpoint || "", "model_name": "BCEmbedding"}, + const BCE_EMBEDDING = [ + {"model_id": config?.embeddingEndpoint || "", "model_name": "BCE_Embedding"}, ] - const embedding_models = config?.kbEnabled === 'true' ? 
BCEMBEDDING : EMBEDDING_MODEL_LIST + let embedding_models = EMBEDDING_MODEL_LIST + + // Check if config?.embeddingEndpoint starts with "bce-embedding-and-bge-reranker" + if (config?.embeddingEndpoint?.startsWith("bce-embedding-and-bge-reranker")) { + embedding_models = [...BCE_EMBEDDING, ...EMBEDDING_MODEL_LIST] + } + embedding_models.forEach((item: {model_id: string; model_name: string})=>{ tempModels.push({ label: item.model_name, From 09e2d92bb31a634b6aa8a7153e5dd326556a957b Mon Sep 17 00:00:00 2001 From: Cui <530051970@qq.com> Date: Thu, 24 Oct 2024 11:16:48 +0800 Subject: [PATCH 019/110] update api params --- source/infrastructure/lib/api/api-stack.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/source/infrastructure/lib/api/api-stack.ts b/source/infrastructure/lib/api/api-stack.ts index ff899850b..a0b99897e 100644 --- a/source/infrastructure/lib/api/api-stack.ts +++ b/source/infrastructure/lib/api/api-stack.ts @@ -384,7 +384,8 @@ export class ApiConstruct extends Construct { data: { type: JsonSchemaType.OBJECT, properties: { s3Bucket: { type: JsonSchemaType.STRING }, - s3Prefix: { type: JsonSchemaType.STRING } + s3Prefix: { type: JsonSchemaType.STRING }, + url: {type: JsonSchemaType.STRING} } }, message: { type: JsonSchemaType.STRING } From 52cdcc8377c56180a19681f10c013af3da29d79e Mon Sep 17 00:00:00 2001 From: zhouxss Date: Thu, 24 Oct 2024 03:22:24 +0000 Subject: [PATCH 020/110] refactor: unified tool to adapt to langchian's tool --- .../langchain_integration/tools/__init__.py | 328 ++++++++++++++++++ .../tools/common_tools/__init__.py | 121 +++++++ .../tools/common_tools/chat.py | 5 + .../tools/common_tools/comparison_rag.py | 51 +++ .../tools/common_tools/get_weather.py | 34 ++ .../tools/common_tools/give_final_response.py | 4 + .../common_tools/give_rhetorical_question.py | 4 + .../tools/common_tools/rag.py | 65 ++++ .../tools/common_tools/step_back_rag.py | 50 +++ 9 files changed, 662 insertions(+) create mode 100644 
source/lambda/online/langchain_integration/tools/__init__.py create mode 100644 source/lambda/online/langchain_integration/tools/common_tools/__init__.py create mode 100644 source/lambda/online/langchain_integration/tools/common_tools/chat.py create mode 100644 source/lambda/online/langchain_integration/tools/common_tools/comparison_rag.py create mode 100644 source/lambda/online/langchain_integration/tools/common_tools/get_weather.py create mode 100644 source/lambda/online/langchain_integration/tools/common_tools/give_final_response.py create mode 100644 source/lambda/online/langchain_integration/tools/common_tools/give_rhetorical_question.py create mode 100644 source/lambda/online/langchain_integration/tools/common_tools/rag.py create mode 100644 source/lambda/online/langchain_integration/tools/common_tools/step_back_rag.py diff --git a/source/lambda/online/langchain_integration/tools/__init__.py b/source/lambda/online/langchain_integration/tools/__init__.py new file mode 100644 index 000000000..91e937b24 --- /dev/null +++ b/source/lambda/online/langchain_integration/tools/__init__.py @@ -0,0 +1,328 @@ +# from langchain.tools.base import StructuredTool,BaseTool,tool +# StructuredTool.from_function +# from langchain_experimental.tools import PythonREPLTool +# from langchain_core.utils.function_calling import convert_to_openai_function +# from llama_index.core.tools import FunctionTool +# from langchain.tools import BaseTool +# from pydantic import create_model + +# from langchain_community.tools import WikipediaQueryRun + + +# builder = StateGraph(State) + + +# # Define nodes: these do the work +# builder.add_node("assistant", Assistant(part_1_assistant_runnable)) +# builder.add_node("tools", create_tool_node_with_fallback(part_1_tools)) +# # Define edges: these determine how the control flow moves +# builder.add_edge(START, "assistant") +# builder.add_conditional_edges( +# "assistant", +# tools_condition, +# ) +# builder.add_edge("tools", "assistant") + +# # The 
checkpointer lets the graph persist its state +# # this is a complete memory for the entire graph. +# memory = MemorySaver() +# part_1_graph = builder.compile(checkpointer=memory) + +from typing import Optional,Union +from pydantic import BaseModel +import platform +import json +import inspect +from functools import wraps +import types + +from datamodel_code_generator import DataModelType, PythonVersion +from datamodel_code_generator.model import get_data_model_types +from datamodel_code_generator.parser.jsonschema import JsonSchemaParser +from langchain.tools.base import StructuredTool,BaseTool + +from common_logic.common_utils.constant import SceneType +from common_logic.common_utils.lambda_invoke_utils import invoke_with_lambda + + +class ToolIdentifier(BaseModel): + scene: SceneType + name: str + + @property + def tool_id(self): + return f"{self.scene}__{self.name}" + + +class ToolManager: + tool_map = {} + + @staticmethod + def convert_tool_def_to_pydantic(tool_id,tool_def:Union[dict,BaseModel]): + if not isinstance(tool_def,dict): + return tool_def + # convert tool definition to pydantic model + current_python_version = ".".join(platform.python_version().split(".")[:-1]) + data_model_types = get_data_model_types( + DataModelType.PydanticBaseModel, + target_python_version=PythonVersion(current_python_version) + ) + parser = JsonSchemaParser( + json.dumps(tool_def,ensure_ascii=False,indent=2), + data_model_type=data_model_types.data_model, + data_model_root_type=data_model_types.root_model, + data_model_field_type=data_model_types.field_model, + data_type_manager_type=data_model_types.data_type_manager, + dump_resolve_reference_action=data_model_types.dump_resolve_reference_action, + use_schema_description=True + ) + result = parser.parse() + new_tool_module = types.ModuleType(tool_id) + exec(result, new_tool_module.__dict__) + return new_tool_module.Model + + + @staticmethod + def get_tool_identifier(scene=None,name=None,tool_identifier=None): + if 
tool_identifier is None: + tool_identifier = ToolIdentifier(scene=scene,name=name) + return tool_identifier + + + @classmethod + def register_lc_tool( + cls, + tool:BaseTool, + scene=None, + name=None, + tool_identifier=None, + ): + tool_identifier = cls.get_tool_identifier( + scene=scene, + name=name, + tool_identifier=None + ) + assert isinstance(tool,BaseTool),(tool,type(tool)) + cls.tool_map[tool_identifier.tool_id] = tool + + + @classmethod + def register_func_as_tool( + cls, + func:callable, + tool_def:dict, + return_direct:False, + scene=None, + name=None, + tool_identifier=None, + ): + tool_identifier = cls.get_tool_identifier( + scene=scene, + name=name, + tool_identifier=tool_identifier + ) + tool = StructuredTool.from_function( + func=func, + name=tool_identifier.name, + args_schema=ToolManager.convert_tool_def_to_pydantic( + tool_id=tool_identifier.tool_id, + tool_def=tool_def + ), + return_direct=return_direct + ) + # register tool + ToolManager.register_lc_tool( + tool_identifier=tool_identifier, + tool=tool + ) + + + @classmethod + def register_aws_lambda_as_tool( + cls, + lambda_name:str, + tool_def:dict, + scene=None, + name=None, + tool_identifier=None, + return_direct=False + ): + + def _func(**kargs): + return invoke_with_lambda(lambda_name,kargs) + + tool_identifier = cls.get_tool_identifier( + scene=scene, + name=name, + tool_identifier=tool_identifier + ) + tool = StructuredTool.from_function( + func=_func, + name=tool_identifier.name, + args_schema=ToolManager.convert_tool_def_to_pydantic( + tool_id=tool_identifier.tool_id, + tool_def=tool_def + ), + return_direct=return_direct + ) + ToolManager.register_lc_tool( + tool_identifier=tool_identifier, + tool=tool + ) + + + + @classmethod + def get_tool(cls, scene, name,**kwargs): + # dynamic import + tool_identifier = ToolIdentifier(scene=scene, name=name) + tool_id = tool_identifier.tool_id + if tool_id not in cls.tool_map: + TOOL_MOFULE_LOAD_FN_MAP[tool_id](**kwargs) + return 
cls.tool_map[tool_id] + + +TOOL_MOFULE_LOAD_FN_MAP = {} + + +def lazy_tool_load_decorator(scene:SceneType,name): + def decorator(func): + tool_identifier = ToolIdentifier(scene=scene, name=name) + @wraps(func) + def wrapper(*args, **kwargs): + if "tool_identifier" in inspect.signature(func).parameters: + kwargs = {**kwargs,"tool_identifier":tool_identifier} + return func(*args, **kwargs) + TOOL_MOFULE_LOAD_FN_MAP[tool_identifier.tool_id] = func + return wrapper + return decorator + + +############################# tool load func ###################### + + +@lazy_tool_load_decorator(SceneType.COMMON,"get_weather") +def _load_common_weather_tool(tool_identifier:ToolIdentifier): + from .common_tools import get_weather + tool_def = { + "description": "Get the current weather for `city_name`", + "properties": { + "city_name": { + "description": "The name of the city to be queried", + "type": "string" + }, + }, + "required": ["city_name"] + } + ToolManager.register_func_as_tool( + tool_identifier.scene, + tool_identifier.name, + get_weather.get_weather, + tool_def, + return_direct=False + ) + + +@lazy_tool_load_decorator(SceneType.COMMON,"give_rhetorical_question") +def _load_common_rhetorical_tool(tool_identifier:ToolIdentifier): + from .common_tools import give_rhetorical_question + tool_def = { + "description": "If the user's question is not clear and specific, resulting in the inability to call other tools, please call this tool to ask the user a rhetorical question", + "properties": { + "question": { + "description": "The rhetorical question to user", + "type": "string" + }, + } + } + ToolManager.register_func_as_tool( + tool_identifier.scene, + tool_identifier.name, + give_rhetorical_question.give_rhetorical_question, + tool_def, + return_direct=True + ) + + +@lazy_tool_load_decorator(SceneType.COMMON,"give_final_response") +def _load_common_final_response_tool(tool_identifier:ToolIdentifier): + from .common_tools import give_final_response + + tool_def = { + 
"description": "If none of the other tools need to be called, call the current tool to complete the direct response to the user.", + "properties": { + "response": { + "description": "Response to user", + "type": "string" + } + }, + "required": ["response"] + } + ToolManager.register_func_as_tool( + tool_identifier.scene, + tool_identifier.name, + give_final_response.give_final_response, + tool_def, + return_direct=True + ) + + +@lazy_tool_load_decorator(SceneType.COMMON,"chat") +def _load_common_chat_tool(tool_identifier:ToolIdentifier): + from .common_tools import chat + tool_def = { + "description": "casual talk with AI", + "properties": { + "response": { + "description": "response to users", + "type": "string" + } + }, + "required": ["response"] + } + + ToolManager.register_func_as_tool( + tool_identifier.scene, + tool_identifier.name, + chat.chat, + tool_def, + return_direct=True + ) + + +@lazy_tool_load_decorator(SceneType.COMMON,"rag_tool") +def _load_common_rag_tool(tool_identifier:ToolIdentifier): + from .common_tools import rag + tool_def = { + "description": "private knowledge", + "properties": { + "query": { + "description": "query for retrieve", + "type": "string" + } + }, + "required": ["query"] + } + ToolManager.register_func_as_tool( + tool_identifier.scene, + tool_identifier.name, + rag.rag, + tool_def, + return_direct=True + ) + + + + + + + + + + + + + + + + diff --git a/source/lambda/online/langchain_integration/tools/common_tools/__init__.py b/source/lambda/online/langchain_integration/tools/common_tools/__init__.py new file mode 100644 index 000000000..c57069898 --- /dev/null +++ b/source/lambda/online/langchain_integration/tools/common_tools/__init__.py @@ -0,0 +1,121 @@ +from common_logic.common_utils.constant import SceneType, ToolRuningMode +from .._tool_base import tool_manager +from . 
import ( + get_weather, + give_rhetorical_question, + give_final_response, + chat, + rag +) + + +SCENE = SceneType.COMMON +LAMBDA_NAME = "lambda_common_tools" + +tool_manager.register_tool({ + "name": "get_weather", + "scene": SCENE, + "lambda_name": LAMBDA_NAME, + "lambda_module_path": get_weather.lambda_handler, + "tool_def": { + "name": "get_weather", + "description": "Get the current weather for `city_name`", + "parameters": { + "type": "object", + "properties": { + "city_name": { + "description": "The name of the city to be queried", + "type": "string" + }, + }, + "required": ["city_name"] + } + }, + "running_mode": ToolRuningMode.LOOP +}) + + +tool_manager.register_tool( + { + "name": "give_rhetorical_question", + "scene": SCENE, + "lambda_name": LAMBDA_NAME, + "lambda_module_path": give_rhetorical_question.lambda_handler, + "tool_def": { + "name": "give_rhetorical_question", + "description": "If the user's question is not clear and specific, resulting in the inability to call other tools, please call this tool to ask the user a rhetorical question", + "parameters": { + "type": "object", + "properties": { + "question": { + "description": "The rhetorical question to user", + "type": "string" + }, + }, + "required": ["question"], + }, + }, + "running_mode": ToolRuningMode.ONCE + } +) + + +tool_manager.register_tool( + { + "name": "give_final_response", + "scene": SCENE, + "lambda_name": LAMBDA_NAME, + "lambda_module_path": give_final_response.lambda_handler, + "tool_def": { + "name": "give_final_response", + "description": "If none of the other tools need to be called, call the current tool to complete the direct response to the user.", + "parameters": { + "type": "object", + "properties": { + "response": { + "description": "Response to user", + "type": "string" + } + }, + "required": ["response"] + }, + }, + "running_mode": ToolRuningMode.ONCE + } +) + + +tool_manager.register_tool({ + "name": "chat", + "scene": SCENE, + "lambda_name": LAMBDA_NAME, + 
"lambda_module_path": chat.lambda_handler, + "tool_def": { + "name": "chat", + "description": "casual talk with AI", + "parameters": { + "type": "object", + "properties": { + "response": { + "description": "response to users", + "type": "string" + }}, + "required": ["response"] + }, + }, + "running_mode": ToolRuningMode.ONCE +}) + + +tool_manager.register_tool({ + "name": "rag_tool", + "scene": SCENE, + "lambda_name": LAMBDA_NAME, + "lambda_module_path": rag.lambda_handler, + "tool_def": { + "name": "rag_tool", + "description": "private knowledge", + "parameters": {} + }, + "running_mode": ToolRuningMode.ONCE +}) diff --git a/source/lambda/online/langchain_integration/tools/common_tools/chat.py b/source/lambda/online/langchain_integration/tools/common_tools/chat.py new file mode 100644 index 000000000..c007c3534 --- /dev/null +++ b/source/lambda/online/langchain_integration/tools/common_tools/chat.py @@ -0,0 +1,5 @@ +# give chat response + +def chat(response:str): + return response + \ No newline at end of file diff --git a/source/lambda/online/langchain_integration/tools/common_tools/comparison_rag.py b/source/lambda/online/langchain_integration/tools/common_tools/comparison_rag.py new file mode 100644 index 000000000..3bf573967 --- /dev/null +++ b/source/lambda/online/langchain_integration/tools/common_tools/comparison_rag.py @@ -0,0 +1,51 @@ +# knowledge base retrieve +from common_logic.common_utils.lambda_invoke_utils import invoke_lambda +from common_logic.common_utils.constant import ( + LLMTaskType +) + +def knowledge_base_retrieve(retriever_params, context=None): + output: str = invoke_lambda( + event_body=retriever_params, + lambda_name="Online_Functions", + lambda_module_path="functions.functions_utils.retriever.retriever", + handler_name="lambda_handler", + ) + contexts = [doc["page_content"] for doc in output["result"]["docs"]] + return contexts + +def lambda_handler(event_body, context=None): + state = event_body['state'] + retriever_params = 
state["chatbot_config"]["comparison_rag_config"]["retriever_config"] + contexts = [] + retriever_params["query"] = event_body['kwargs']['query_a'] + contexts.extend(knowledge_base_retrieve(retriever_params, context=context)) + retriever_params["query"] = event_body['kwargs']['query_b'] + contexts.extend(knowledge_base_retrieve(retriever_params, context=context)) + context = "\n\n".join(contexts) + + # llm generate + system_prompt = (f"请根据context内的信息回答问题:\n" + "\n" + " - 回复内容需要展现出礼貌。回答内容为一句话,言简意赅。\n" + " - 使用中文回答。\n" + "\n" + "\n" + f"{context}\n" + "" + ) + + output:str = invoke_lambda( + lambda_name='Online_LLM_Generate', + lambda_module_path="lambda_llm_generate.llm_generate", + handler_name='lambda_handler', + event_body={ + "llm_config": { + **state['chatbot_config']['rag_daily_reception_config']['llm_config'], + "system_prompt": system_prompt, + "intent_type": LLMTaskType.CHAT}, + "llm_input": {"query": state['query'], "chat_history": state['chat_history']} + } + ) + + return {"code":0, "result":output} \ No newline at end of file diff --git a/source/lambda/online/langchain_integration/tools/common_tools/get_weather.py b/source/lambda/online/langchain_integration/tools/common_tools/get_weather.py new file mode 100644 index 000000000..ccecb204c --- /dev/null +++ b/source/lambda/online/langchain_integration/tools/common_tools/get_weather.py @@ -0,0 +1,34 @@ +# get weather tool +import requests + +def get_weather(city_name:str): + if not isinstance(city_name, str): + raise TypeError("City name must be a string") + + key_selection = { + "current_condition": [ + "temp_C", + "FeelsLikeC", + "humidity", + "weatherDesc", + "observation_time", + ], + } + + try: + resp = requests.get(f"https://wttr.in/{city_name}?format=j1") + resp.raise_for_status() + resp = resp.json() + ret = {k: {_v: resp[k][0][_v] for _v in v} for k, v in key_selection.items()} + except: + import traceback + + ret = ("Error encountered while fetching weather data!\n" + traceback.format_exc() + ) + 
+ return str(ret) + + +def lambda_handler(event_body,context=None): + result = get_weather(**event_body['kwargs']) + return {"code":0, "result": result} \ No newline at end of file diff --git a/source/lambda/online/langchain_integration/tools/common_tools/give_final_response.py b/source/lambda/online/langchain_integration/tools/common_tools/give_final_response.py new file mode 100644 index 000000000..82146d9b0 --- /dev/null +++ b/source/lambda/online/langchain_integration/tools/common_tools/give_final_response.py @@ -0,0 +1,4 @@ +# give final response tool + +def give_final_response(response:str): + return response \ No newline at end of file diff --git a/source/lambda/online/langchain_integration/tools/common_tools/give_rhetorical_question.py b/source/lambda/online/langchain_integration/tools/common_tools/give_rhetorical_question.py new file mode 100644 index 000000000..ac78268af --- /dev/null +++ b/source/lambda/online/langchain_integration/tools/common_tools/give_rhetorical_question.py @@ -0,0 +1,4 @@ +# give rhetorical question + +def give_rhetorical_question(question:str): + return question \ No newline at end of file diff --git a/source/lambda/online/langchain_integration/tools/common_tools/rag.py b/source/lambda/online/langchain_integration/tools/common_tools/rag.py new file mode 100644 index 000000000..2537bb5ca --- /dev/null +++ b/source/lambda/online/langchain_integration/tools/common_tools/rag.py @@ -0,0 +1,65 @@ +from common_logic.common_utils.lambda_invoke_utils import invoke_lambda +from common_logic.common_utils.prompt_utils import get_prompt_templates_from_ddb +from common_logic.common_utils.constant import ( + LLMTaskType +) +from common_logic.common_utils.lambda_invoke_utils import send_trace + + +def rag(query,state): + # state = event_body['state'] + context_list = [] + # add qq match results + context_list.extend(state['qq_match_results']) + figure_list = [] + retriever_params = state["chatbot_config"]["private_knowledge_config"] + 
retriever_params["query"] = state[retriever_params.get("retriever_config",{}).get("query_key","query")] + output: str = invoke_lambda( + event_body=retriever_params, + lambda_name="Online_Functions", + lambda_module_path="functions.functions_utils.retriever.retriever", + handler_name="lambda_handler", + ) + + for doc in output["result"]["docs"]: + context_list.append(doc["page_content"]) + figure_list = figure_list + doc.get("figure",[]) + + # Remove duplicate figures + unique_set = {tuple(d.items()) for d in figure_list} + unique_figure_list = [dict(t) for t in unique_set] + state['extra_response']['figures'] = unique_figure_list + + send_trace(f"\n\n**rag-contexts:** {context_list}", enable_trace=state["enable_trace"]) + + group_name = state['chatbot_config']['group_name'] + llm_config = state["chatbot_config"]["private_knowledge_config"]['llm_config'] + chatbot_id = state["chatbot_config"]["chatbot_id"] + task_type = LLMTaskType.RAG + prompt_templates_from_ddb = get_prompt_templates_from_ddb( + group_name, + model_id=llm_config['model_id'], + task_type=task_type, + chatbot_id=chatbot_id + ) + + output: str = invoke_lambda( + lambda_name="Online_LLM_Generate", + lambda_module_path="lambda_llm_generate.llm_generate", + handler_name="lambda_handler", + event_body={ + "llm_config": { + **prompt_templates_from_ddb, + **llm_config, + "stream": state["stream"], + "intent_type": task_type, + }, + "llm_input": { + "contexts": context_list, + "query": state["query"], + "chat_history": state["chat_history"], + }, + }, + ) + return output + diff --git a/source/lambda/online/langchain_integration/tools/common_tools/step_back_rag.py b/source/lambda/online/langchain_integration/tools/common_tools/step_back_rag.py new file mode 100644 index 000000000..cbc09a57d --- /dev/null +++ b/source/lambda/online/langchain_integration/tools/common_tools/step_back_rag.py @@ -0,0 +1,50 @@ +# knowledge base retrieve +from common_logic.common_utils.lambda_invoke_utils import invoke_lambda 
+from common_logic.common_utils.constant import ( + LLMTaskType +) + +def knowledge_base_retrieve(retriever_params, context=None): + output: str = invoke_lambda( + event_body=retriever_params, + lambda_name="Online_Functions", + lambda_module_path="functions.functions_utils.retriever.retriever", + handler_name="lambda_handler", + ) + contexts = [doc["page_content"] for doc in output["result"]["docs"]] + return contexts + +def lambda_handler(event_body, context=None): + state = event_body['state'] + retriever_params = state["chatbot_config"]["step_back_rag_config"]["retriever_config"] + contexts = [] + retriever_params["query"] = event_body['kwargs']['step_back_query'] + contexts.extend(knowledge_base_retrieve(retriever_params, context=context)) + context = "\n\n".join(contexts) + + # llm generate + system_prompt = (f"请根据context内的信息回答问题:\n" + "\n" + " - 回复内容需要展现出礼貌。回答内容为一句话,言简意赅。\n" + " - 每次回答总是先进行思考,并将思考过程写在标签中。\n" + " - 使用中文回答。\n" + "\n" + "\n" + f"{context}\n" + "" + ) + + output:str = invoke_lambda( + lambda_name='Online_LLM_Generate', + lambda_module_path="lambda_llm_generate.llm_generate", + handler_name='lambda_handler', + event_body={ + "llm_config": { + **state['chatbot_config']['rag_daily_reception_config']['llm_config'], + "system_prompt": system_prompt, + "intent_type": LLMTaskType.CHAT}, + "llm_input": {"query": state['query'], "chat_history": state['chat_history']} + } + ) + + return {"code":0, "result":output} \ No newline at end of file From d62f3a9a54f2d054cf139ca99d82d17f5fa1caa5 Mon Sep 17 00:00:00 2001 From: zhouxss Date: Thu, 24 Oct 2024 03:24:55 +0000 Subject: [PATCH 021/110] refactor: modify module import --- .../chains/chat_chain.py | 4 +-- .../chains/conversation_summary_chain.py | 4 +-- .../chains/hyde_chain.py | 2 +- .../chains/intention_chain.py | 4 +-- .../chains/llm_chain_base.py | 26 ------------------- .../chains/query_rewrite_chain.py | 2 +- .../langchain_integration/chains/rag_chain.py | 21 +++++++++++++-- 
.../retail_chains/auto_evaluation_chain.py | 2 +- .../retail_conversation_summary_chain.py | 2 +- .../retail_tool_calling_chain_claude_xml.py | 2 +- .../retail_tool_calling_chain_json.py | 2 +- .../chains/stepback_chain.py | 4 +-- .../chains/tool_calling_chain_claude_xml.py | 4 +-- 13 files changed, 35 insertions(+), 44 deletions(-) delete mode 100644 source/lambda/online/langchain_integration/chains/llm_chain_base.py diff --git a/source/lambda/online/langchain_integration/chains/chat_chain.py b/source/lambda/online/langchain_integration/chains/chat_chain.py index 730a84904..35bdb41c0 100644 --- a/source/lambda/online/langchain_integration/chains/chat_chain.py +++ b/source/lambda/online/langchain_integration/chains/chat_chain.py @@ -6,8 +6,8 @@ from langchain_core.messages import convert_to_messages -from ..llm_models import Model -from .llm_chain_base import LLMChain +from ..chat_models import Model +from . import LLMChain from common_logic.common_utils.constant import ( MessageType, diff --git a/source/lambda/online/langchain_integration/chains/conversation_summary_chain.py b/source/lambda/online/langchain_integration/chains/conversation_summary_chain.py index c3f1aa1db..c7f0631f1 100644 --- a/source/lambda/online/langchain_integration/chains/conversation_summary_chain.py +++ b/source/lambda/online/langchain_integration/chains/conversation_summary_chain.py @@ -7,9 +7,9 @@ ) -from ..llm_models import Model +from ..chat_models import Model from .chat_chain import Iternlm2Chat7BChatChain -from .llm_chain_base import LLMChain +from . 
import LLMChain from common_logic.common_utils.constant import ( MessageType, LLMTaskType, diff --git a/source/lambda/online/langchain_integration/chains/hyde_chain.py b/source/lambda/online/langchain_integration/chains/hyde_chain.py index de3b0f0dd..8fda3f2ca 100644 --- a/source/lambda/online/langchain_integration/chains/hyde_chain.py +++ b/source/lambda/online/langchain_integration/chains/hyde_chain.py @@ -17,7 +17,7 @@ from ..chains import LLMChain from ..chat_models import Model as LLM_Model from .chat_chain import Iternlm2Chat7BChatChain -from .llm_chain_base import LLMChain +from . import LLMChain HYDE_TYPE = LLMTaskType.HYDE_TYPE diff --git a/source/lambda/online/langchain_integration/chains/intention_chain.py b/source/lambda/online/langchain_integration/chains/intention_chain.py index 292023fda..4c2d3d202 100644 --- a/source/lambda/online/langchain_integration/chains/intention_chain.py +++ b/source/lambda/online/langchain_integration/chains/intention_chain.py @@ -13,9 +13,9 @@ ) from common_logic.common_utils.constant import LLMTaskType,LLMModelType -from ..llm_models import Model +from ..chat_models import Model from .chat_chain import Iternlm2Chat7BChatChain -from .llm_chain_base import LLMChain +from . 
import LLMChain abs_dir = os.path.dirname(__file__) diff --git a/source/lambda/online/langchain_integration/chains/llm_chain_base.py b/source/lambda/online/langchain_integration/chains/llm_chain_base.py deleted file mode 100644 index 98ae93d34..000000000 --- a/source/lambda/online/langchain_integration/chains/llm_chain_base.py +++ /dev/null @@ -1,26 +0,0 @@ -class LLMChainMeta(type): - def __new__(cls, name, bases, attrs): - new_cls = type.__new__(cls, name, bases, attrs) - if name == "LLMChain": - return new_cls - new_cls.model_map[new_cls.get_chain_id()] = new_cls - return new_cls - - -class LLMChain(metaclass=LLMChainMeta): - model_map = {} - - @classmethod - def get_chain_id(cls): - return cls._get_chain_id(cls.model_id, cls.intent_type) - - @staticmethod - def _get_chain_id(model_id, intent_type): - return f"{model_id}__{intent_type}" - - @classmethod - def get_chain(cls, model_id, intent_type, model_kwargs=None, **kwargs): - return cls.model_map[cls._get_chain_id(model_id, intent_type)].create_chain( - model_kwargs=model_kwargs, **kwargs - ) - diff --git a/source/lambda/online/langchain_integration/chains/query_rewrite_chain.py b/source/lambda/online/langchain_integration/chains/query_rewrite_chain.py index 331552a1a..9379b84e0 100644 --- a/source/lambda/online/langchain_integration/chains/query_rewrite_chain.py +++ b/source/lambda/online/langchain_integration/chains/query_rewrite_chain.py @@ -14,7 +14,7 @@ from ..chains import LLMChain from ..chat_models import Model as LLM_Model from .chat_chain import Iternlm2Chat7BChatChain -from .llm_chain_base import LLMChain +from . import LLMChain QUERY_REWRITE_TYPE = LLMTaskType.QUERY_REWRITE_TYPE query_expansion_template_claude = PromptTemplate.from_template("""You are an AI language model assistant. Your task is to generate 1 - 5 different sub questions OR alternate versions of the given user question to retrieve relevant documents from a vector database. 
diff --git a/source/lambda/online/langchain_integration/chains/rag_chain.py b/source/lambda/online/langchain_integration/chains/rag_chain.py index f04750f64..be9d42efa 100644 --- a/source/lambda/online/langchain_integration/chains/rag_chain.py +++ b/source/lambda/online/langchain_integration/chains/rag_chain.py @@ -14,8 +14,8 @@ from common_logic.common_utils.logger_utils import print_llm_messages # from ...prompt_template import convert_chat_history_from_fstring_format -from ..llm_models import Model -from .llm_chain_base import LLMChain +from ..chat_models import Model +from . import LLMChain def get_claude_rag_context(contexts: list): @@ -81,10 +81,27 @@ class Claude3SonnetRAGLLMChain(Claude2RagLLMChain): class Claude3HaikuRAGLLMChain(Claude2RagLLMChain): model_id = LLMModelType.CLAUDE_3_HAIKU +class Claude35SonnetRAGLLMChain(Claude2RagLLMChain): + model_id = LLMModelType.CLAUDE_3_5_SONNET + + +class Llama31Instruct70B(Claude2RagLLMChain): + model_id = LLMModelType.LLAMA3_1_70B_INSTRUCT + + +class MistraLlarge2407(Claude2RagLLMChain): + model_id = LLMModelType.MISTRAL_LARGE_2407 + + +class CohereCommandRPlus(Claude2RagLLMChain): + model_id = LLMModelType.COHERE_COMMAND_R_PLUS + + class Mixtral8x7bChatChain(Claude2RagLLMChain): model_id = LLMModelType.MIXTRAL_8X7B_INSTRUCT + from .chat_chain import GLM4Chat9BChatChain class GLM4Chat9BRagChain(GLM4Chat9BChatChain): diff --git a/source/lambda/online/langchain_integration/chains/retail_chains/auto_evaluation_chain.py b/source/lambda/online/langchain_integration/chains/retail_chains/auto_evaluation_chain.py index bcdd7011d..28d4b22c0 100644 --- a/source/lambda/online/langchain_integration/chains/retail_chains/auto_evaluation_chain.py +++ b/source/lambda/online/langchain_integration/chains/retail_chains/auto_evaluation_chain.py @@ -12,7 +12,7 @@ LLMModelType, ) from ...llm_models import Model -from ..llm_chain_base import LLMChain +from ..__llm_chain_base import LLMChain from ..chat_chain import Claude2ChatChain diff 
--git a/source/lambda/online/langchain_integration/chains/retail_chains/retail_conversation_summary_chain.py b/source/lambda/online/langchain_integration/chains/retail_chains/retail_conversation_summary_chain.py index d5be022ef..eae0716d6 100644 --- a/source/lambda/online/langchain_integration/chains/retail_chains/retail_conversation_summary_chain.py +++ b/source/lambda/online/langchain_integration/chains/retail_chains/retail_conversation_summary_chain.py @@ -8,7 +8,7 @@ from ...llm_models import Model -from ..llm_chain_base import LLMChain +from ..__llm_chain_base import LLMChain from common_logic.common_utils.constant import ( MessageType, LLMTaskType, diff --git a/source/lambda/online/langchain_integration/chains/retail_chains/retail_tool_calling_chain_claude_xml.py b/source/lambda/online/langchain_integration/chains/retail_chains/retail_tool_calling_chain_claude_xml.py index 803e4ef23..71a953c5a 100644 --- a/source/lambda/online/langchain_integration/chains/retail_chains/retail_tool_calling_chain_claude_xml.py +++ b/source/lambda/online/langchain_integration/chains/retail_chains/retail_tool_calling_chain_claude_xml.py @@ -22,7 +22,7 @@ SceneType ) from functions import get_tool_by_name -from ..llm_chain_base import LLMChain +from ..__llm_chain_base import LLMChain from ...llm_models import Model tool_call_guidelines = """ diff --git a/source/lambda/online/langchain_integration/chains/retail_chains/retail_tool_calling_chain_json.py b/source/lambda/online/langchain_integration/chains/retail_chains/retail_tool_calling_chain_json.py index d20bb6c03..f1bc5d8b0 100644 --- a/source/lambda/online/langchain_integration/chains/retail_chains/retail_tool_calling_chain_json.py +++ b/source/lambda/online/langchain_integration/chains/retail_chains/retail_tool_calling_chain_json.py @@ -25,7 +25,7 @@ ) from functions import get_tool_by_name -from ..llm_chain_base import LLMChain +from ..__llm_chain_base import LLMChain from ...llm_models import Model from ..chat_chain import 
GLM4Chat9BChatChain from common_logic.common_utils.logger_utils import get_logger diff --git a/source/lambda/online/langchain_integration/chains/stepback_chain.py b/source/lambda/online/langchain_integration/chains/stepback_chain.py index 4a14db1d1..01e21bb47 100644 --- a/source/lambda/online/langchain_integration/chains/stepback_chain.py +++ b/source/lambda/online/langchain_integration/chains/stepback_chain.py @@ -8,8 +8,8 @@ LLMTaskType, LLMModelType ) -from ..chains.chat_chain import Iternlm2Chat7BChatChain -from ..chains.llm_chain_base import LLMChain +from .chat_chain import Iternlm2Chat7BChatChain +from . import LLMChain from ..chat_models import Model STEPBACK_PROMPTING_TYPE = LLMTaskType.STEPBACK_PROMPTING_TYPE diff --git a/source/lambda/online/langchain_integration/chains/tool_calling_chain_claude_xml.py b/source/lambda/online/langchain_integration/chains/tool_calling_chain_claude_xml.py index 9d6b84b38..114139f84 100644 --- a/source/lambda/online/langchain_integration/chains/tool_calling_chain_claude_xml.py +++ b/source/lambda/online/langchain_integration/chains/tool_calling_chain_claude_xml.py @@ -24,8 +24,8 @@ ) from common_logic.common_utils.time_utils import get_china_now -from .llm_chain_base import LLMChain -from ..llm_models import Model +from . import LLMChain +from ..chat_models import Model incorrect_tool_call_example = """Here is an example of an incorrectly formatted tool call, which you should avoid. 
From e3f063ee33542730396a3c3e30c7a6b7c8449a2a Mon Sep 17 00:00:00 2001 From: zhouxss Date: Thu, 24 Oct 2024 03:26:49 +0000 Subject: [PATCH 022/110] refactor: add llm chain tool_calling_api and it's prompt template --- .../common_logic/common_utils/prompt_utils.py | 108 ++++-- .../chains/tool_calling_chain_claude_api.py | 320 ++++++++++++++++++ 2 files changed, 392 insertions(+), 36 deletions(-) create mode 100644 source/lambda/online/langchain_integration/chains/tool_calling_chain_claude_api.py diff --git a/source/lambda/online/common_logic/common_utils/prompt_utils.py b/source/lambda/online/common_logic/common_utils/prompt_utils.py index 7d146a693..f2a9c3f56 100644 --- a/source/lambda/online/common_logic/common_utils/prompt_utils.py +++ b/source/lambda/online/common_logic/common_utils/prompt_utils.py @@ -17,7 +17,9 @@ EXPORT_MODEL_IDS = [ LLMModelType.CLAUDE_3_HAIKU, LLMModelType.CLAUDE_3_SONNET, - # LLMModelType.CLAUDE_3_5_SONNET, + LLMModelType.LLAMA3_1_70B_INSTRUCT, + LLMModelType.MISTRAL_LARGE_2407, + LLMModelType.COHERE_COMMAND_R_PLUS ] EXPORT_SCENES = [ @@ -142,7 +144,10 @@ def prompt_template_render(self, prompt_template: dict): LLMModelType.CLAUDE_3_SONNET, LLMModelType.CLAUDE_3_5_SONNET, LLMModelType.CLAUDE_INSTANCE, - LLMModelType.MIXTRAL_8X7B_INSTRUCT + LLMModelType.MIXTRAL_8X7B_INSTRUCT, + LLMModelType.LLAMA3_1_70B_INSTRUCT, + LLMModelType.MISTRAL_LARGE_2407, + LLMModelType.COHERE_COMMAND_R_PLUS, ], task_type=LLMTaskType.RAG, prompt_template=CLAUDE_RAG_SYSTEM_PROMPT, @@ -171,37 +176,9 @@ def prompt_template_render(self, prompt_template: dict): ) -# CHIT_CHAT_SYSTEM_TEMPLATE = "你是一个AI助理。今天是{date},{weekday}. 
" - -# register_prompt_templates( -# model_ids=[ -# LLMModelType.CLAUDE_2, -# LLMModelType.CLAUDE_21, -# LLMModelType.CLAUDE_3_HAIKU, -# LLMModelType.CLAUDE_3_SONNET, -# LLMModelType.CLAUDE_3_5_SONNET, -# LLMModelType.CLAUDE_INSTANCE, -# LLMModelType.MIXTRAL_8X7B_INSTRUCT, -# LLMModelType.GLM_4_9B_CHAT, -# LLMModelType.QWEN2INSTRUCT72B, -# LLMModelType.QWEN2INSTRUCT7B -# ], -# task_type=LLMTaskType.CHAT, -# prompt_template=CHIT_CHAT_SYSTEM_TEMPLATE, -# prompt_name="system_prompt" -# ) - - -# CQR_TEMPLATE = """Given the following conversation between `USER` and `AI`, and a follow up `USER` reply, Put yourself in the shoes of `USER`, rephrase the follow up \ -# `USER` reply to be a standalone reply. - -# Chat History: -# {history} - -# The USER's follow up reply: {question}""" - - +################ # query rewrite prompt template from paper https://arxiv.org/pdf/2401.10225 +################### CQR_SYSTEM_PROMPT = """You are a helpful, pattern-following assistant.""" CQR_USER_PROMPT_TEMPLATE = """Given the following conversation between PersonU and PersonA: @@ -284,7 +261,11 @@ def prompt_template_render(self, prompt_template: dict): LLMModelType.MIXTRAL_8X7B_INSTRUCT, LLMModelType.QWEN2INSTRUCT72B, LLMModelType.QWEN2INSTRUCT7B, - LLMModelType.GLM_4_9B_CHAT + LLMModelType.GLM_4_9B_CHAT, + LLMModelType.LLAMA3_1_70B_INSTRUCT, + LLMModelType.MISTRAL_LARGE_2407, + LLMModelType.COHERE_COMMAND_R_PLUS, + ], task_type=LLMTaskType.CONVERSATION_SUMMARY_TYPE, prompt_template=CQR_SYSTEM_PROMPT, @@ -302,7 +283,10 @@ def prompt_template_render(self, prompt_template: dict): LLMModelType.MIXTRAL_8X7B_INSTRUCT, LLMModelType.QWEN2INSTRUCT72B, LLMModelType.QWEN2INSTRUCT7B, - LLMModelType.GLM_4_9B_CHAT + LLMModelType.GLM_4_9B_CHAT, + LLMModelType.LLAMA3_1_70B_INSTRUCT, + LLMModelType.MISTRAL_LARGE_2407, + LLMModelType.COHERE_COMMAND_R_PLUS, ], task_type=LLMTaskType.CONVERSATION_SUMMARY_TYPE, prompt_template=CQR_USER_PROMPT_TEMPLATE, @@ -321,14 +305,19 @@ def prompt_template_render(self, 
prompt_template: dict): LLMModelType.MIXTRAL_8X7B_INSTRUCT, LLMModelType.QWEN2INSTRUCT72B, LLMModelType.QWEN2INSTRUCT7B, - LLMModelType.GLM_4_9B_CHAT + LLMModelType.GLM_4_9B_CHAT, + LLMModelType.LLAMA3_1_70B_INSTRUCT, + LLMModelType.MISTRAL_LARGE_2407, + LLMModelType.COHERE_COMMAND_R_PLUS, ], task_type=LLMTaskType.CONVERSATION_SUMMARY_TYPE, prompt_template=json.dumps(CQR_FEW_SHOTS, ensure_ascii=False, indent=2), prompt_name="few_shots" ) -# agent prompt + + +############## xml agent prompt ############# AGENT_USER_PROMPT = "你是一个AI助理。今天是{date},{weekday}. " register_prompt_templates( model_ids=[ @@ -362,12 +351,59 @@ def prompt_template_render(self, prompt_template: dict): LLMModelType.CLAUDE_3_HAIKU, LLMModelType.CLAUDE_3_SONNET, LLMModelType.CLAUDE_3_5_SONNET, + LLMModelType.LLAMA3_1_70B_INSTRUCT, + LLMModelType.MISTRAL_LARGE_2407, + LLMModelType.COHERE_COMMAND_R_PLUS, ], task_type=LLMTaskType.TOOL_CALLING_XML, prompt_template=AGENT_GUIDELINES_PROMPT, prompt_name="guidelines_prompt" ) +################# api agent prompt ##################### +AGENT_USER_PROMPT = "你是一个AI助理。今天是{date},{weekday}. " +register_prompt_templates( + model_ids=[ + LLMModelType.CLAUDE_3_HAIKU, + LLMModelType.CLAUDE_3_SONNET, + LLMModelType.CLAUDE_3_5_SONNET, + LLMModelType.LLAMA3_1_70B_INSTRUCT, + LLMModelType.MISTRAL_LARGE_2407, + LLMModelType.COHERE_COMMAND_R_PLUS, + ], + task_type=LLMTaskType.TOOL_CALLING_API, + prompt_template=AGENT_USER_PROMPT, + prompt_name="user_prompt" +) + +AGENT_GUIDELINES_PROMPT = """ +- 每次回答总是先进行思考,并将思考过程写在标签中。请你按照下面的步骤进行思考: + 1. 判断根据当前的上下文是否足够回答用户的问题。 + 2. 如果当前的上下文足够回答用户的问题,请调用 `give_final_response` 工具。 + 3. 如果当前的上下文不能支持回答用户的问题,你可以考虑调用提供的工具。 + 4. 如果调用工具对应的参数不够,请调用反问工具 `give_rhetorical_question` 来让用户提供更加充分的信息。如果调用工具不需要参数,则不需要调用反问工具。 + 5. 最后给出你要调用的工具名称。 +- Always output with the same language as user's query. If the content is english, use englisth to output. If the content is Chinese, use Chinese to output. 
+ +""" + +register_prompt_templates( + model_ids=[ + LLMModelType.CLAUDE_2, + LLMModelType.CLAUDE_21, + LLMModelType.CLAUDE_3_HAIKU, + LLMModelType.CLAUDE_3_SONNET, + LLMModelType.CLAUDE_3_5_SONNET, + LLMModelType.LLAMA3_1_70B_INSTRUCT, + LLMModelType.MISTRAL_LARGE_2407, + LLMModelType.COHERE_COMMAND_R_PLUS, + ], + task_type=LLMTaskType.TOOL_CALLING_API, + prompt_template=AGENT_GUIDELINES_PROMPT, + prompt_name="guidelines_prompt" +) + + if __name__ == "__main__": print(get_all_templates()) diff --git a/source/lambda/online/langchain_integration/chains/tool_calling_chain_claude_api.py b/source/lambda/online/langchain_integration/chains/tool_calling_chain_claude_api.py new file mode 100644 index 000000000..55d88f958 --- /dev/null +++ b/source/lambda/online/langchain_integration/chains/tool_calling_chain_claude_api.py @@ -0,0 +1,320 @@ +# tool calling chain +import json +from typing import List,Dict,Any +import re + +from langchain.schema.runnable import ( + RunnableLambda, + RunnablePassthrough +) +from common_logic.common_utils.prompt_utils import get_prompt_template +from common_logic.common_utils.logger_utils import print_llm_messages +from langchain_core.messages import( + AIMessage, + SystemMessage +) +from langchain.prompts import ChatPromptTemplate +from langchain_core.messages import AIMessage,SystemMessage +from langchain.tools.base import BaseTool + +from common_logic.common_utils.constant import ( + LLMTaskType, + LLMModelType, + MessageType +) +from common_logic.common_utils.time_utils import get_china_now + +from . import LLMChain +from ..chat_models import Model + +# incorrect_tool_call_example = """Here is an example of an incorrectly formatted tool call, which you should avoid. +# +# +# +# tool_name +# +# +# question +# string +# value +# +# +# +# +# + +# In this incorrect tool calling example, the parameter `name` should form a XLM tag. 
+# """ + + +# SYSTEM_MESSAGE_PROMPT =(f"In this environment you have access to a set of tools you can use to answer the user's question.\n" +# "\n" +# "You may call them like this:\n" +# "\n" +# "\n" +# "$TOOL_NAME\n" +# "\n" +# "<$PARAMETER_NAME>$PARAMETER_VALUE\n" +# "...\n" +# "\n" +# "\n" +# "\n" +# "\n" +# "Here are the tools available:\n" +# "\n" +# "{tools}" +# "\n" +# "\nAnswer the user's request using relevant tools (if they are available). Before calling a tool, do some analysis within tags. First, think about which of the provided tools is the relevant tool to answer the user's request. Second, go through each of the required parameters of the relevant tool and determine if the user has directly provided or given enough information to infer a value. When deciding if the parameter can be inferred, carefully consider all the context to see if it supports a specific value. If all of the required parameters are present or can be reasonably inferred, close the thinking tag and proceed with the tool call. BUT, if one of the values for a required parameter is missing, DO NOT invoke the function (not even with fillers for the missing params) and instead, ask the user to provide the missing parameters. DO NOT ask for more information on optional parameters if it is not provided." +# "\nHere are some guidelines for you:\n{tool_call_guidelines}." +# f"\n{incorrect_tool_call_example}" +# ) + +# SYSTEM_MESSAGE_PROMPT_WITH_FEWSHOT_EXAMPLES = SYSTEM_MESSAGE_PROMPT + ( +# "Some examples of tool calls are given below, where the content within represents the most recent reply in the dialog." 
+# "\n{fewshot_examples}" +# ) + +# TOOL_FORMAT = """ +# {tool_name} +# {tool_description} +# +# {formatted_required_parameters} +# +# +# {formatted_optional_parameters} +# +# """ + +# TOOL_PARAMETER_FORMAT = """ +# {parameter_name} +# {parameter_type} +# {parameter_description} +# """ + +# TOOL_EXECUTE_SUCCESS_TEMPLATE = """ +# +# +# {tool_name} +# +# {result} +# +# +# +# """ + +# TOOL_EXECUTE_FAIL_TEMPLATE = """ +# +# +# {error} +# +# +# """ + +# AGENT_SYSTEM_PROMPT = "你是一个亚马逊云科技的AI助理,你的名字是亚麻小Q。今天是{date_str},{weekday}. " + + +# def _get_type(parameter: Dict[str, Any]) -> str: +# if "type" in parameter: +# return parameter["type"] +# if "anyOf" in parameter: +# return json.dumps({"anyOf": parameter["anyOf"]}) +# if "allOf" in parameter: +# return json.dumps({"allOf": parameter["allOf"]}) +# return json.dumps(parameter) + + +# def convert_openai_tool_to_anthropic(tools:list[dict])->str: +# formatted_tools = tools +# tools_data = [ +# { +# "tool_name": tool["name"], +# "tool_description": tool["description"], +# "formatted_required_parameters": "\n".join( +# [ +# TOOL_PARAMETER_FORMAT.format( +# parameter_name=name, +# parameter_type=_get_type(parameter), +# parameter_description=parameter.get("description"), +# ) for name, parameter in tool["parameters"]["properties"].items() +# if name in tool["parameters"].get("required", []) +# ] +# ), +# "formatted_optional_parameters": "\n".join( +# [ +# TOOL_PARAMETER_FORMAT.format( +# parameter_name=name, +# parameter_type=_get_type(parameter), +# parameter_description=parameter.get("description"), +# ) for name, parameter in tool["parameters"]["properties"].items() +# if name not in tool["parameters"].get("required", []) +# ] +# ), +# } +# for tool in formatted_tools +# ] +# tools_formatted = "\n".join( +# [ +# TOOL_FORMAT.format( +# tool_name=tool["tool_name"], +# tool_description=tool["tool_description"], +# formatted_required_parameters=tool["formatted_required_parameters"], +# 
formatted_optional_parameters=tool["formatted_optional_parameters"], +# ) +# for tool in tools_data +# ] +# ) +# return tools_formatted + + +class Claude2ToolCallingChain(LLMChain): + model_id = LLMModelType.CLAUDE_2 + intent_type = LLMTaskType.TOOL_CALLING_API + default_model_kwargs = { + "max_tokens": 2000, + "temperature": 0.1, + "top_p": 0.9 + } + + @staticmethod + def format_fewshot_examples(fewshot_examples:list[dict]): + fewshot_example_strs = [] + for fewshot_example in fewshot_examples: + param_strs = [] + for p,v in fewshot_example['kwargs'].items(): + param_strs.append(f"<{p}>{v}\n" + f"{fewshot_example['query']}\n" + f"\n" + "\n" + "\n" + f"{fewshot_example['name']}\n" + "\n" + f"{param_str}" + "\n" + "\n" + "\n" + "\n" + "" + ) + fewshot_example_strs.append(fewshot_example_str) + fewshot_example_str = '\n'.join(fewshot_example_strs) + return f"\n{fewshot_example_str}\n" + + @classmethod + def parse_function_calls_from_ai_message(cls,message:AIMessage): + content = "" + message.content + "" + function_calls:List[str] = re.findall("(.*?)", content,re.S) + if not function_calls: + content = "" + message.content + + return { + "function_calls": function_calls, + "content": content + } + + @classmethod + def create_chat_history(cls,x): + chat_history = x['chat_history'] + \ + [{"role": MessageType.HUMAN_MESSAGE_TYPE,"content": x['query']}] + \ + x['agent_tool_history'] + return chat_history + + @classmethod + def get_common_system_prompt(cls,system_prompt_template:str): + now = get_china_now() + date_str = now.strftime("%Y年%m月%d日") + weekdays = ['星期一', '星期二', '星期三', '星期四', '星期五', '星期六', '星期日'] + weekday = weekdays[now.weekday()] + system_prompt = system_prompt_template.format(date=date_str,weekday=weekday) + return system_prompt + + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + model_kwargs = model_kwargs or {} + tools:list = kwargs['tools'] + assert all(isinstance(tool,BaseTool) for tool in tools),tools + fewshot_examples = 
kwargs.get('fewshot_examples',[]) + if fewshot_examples: + fewshot_examples.append({ + "name": "give_rhetorical_question", + "query": "今天天气怎么样?", + "kwargs": {"question": "请问你想了解哪个城市的天气?"} + }) + user_system_prompt = get_prompt_template( + model_id=cls.model_id, + task_type=cls.intent_type, + prompt_name="user_prompt" + ).prompt_template + + user_system_prompt = kwargs.get("user_prompt",None) or user_system_prompt + + user_system_prompt = cls.get_common_system_prompt( + user_system_prompt + ) + guidelines_prompt = get_prompt_template( + model_id=cls.model_id, + task_type=cls.intent_type, + prompt_name="guidelines_prompt" + ).prompt_template + + guidelines_prompt = kwargs.get("guidelines_prompt",None) or guidelines_prompt + model_kwargs = {**cls.default_model_kwargs, **model_kwargs} + + # tools_formatted = convert_openai_tool_to_anthropic(tools) + + if fewshot_examples: + system_prompt = SYSTEM_MESSAGE_PROMPT_WITH_FEWSHOT_EXAMPLES.format( + tools=tools_formatted, + fewshot_examples=cls.format_fewshot_examples(fewshot_examples), + tool_call_guidelines=guidelines_prompt + ) + else: + system_prompt = SYSTEM_MESSAGE_PROMPT.format( + tools=tools_formatted, + tool_call_guidelines=guidelines_prompt + ) + + system_prompt = user_system_prompt + system_prompt + tool_calling_template = ChatPromptTemplate.from_messages( + [ + SystemMessage(content=system_prompt), + ("placeholder", "{chat_history}"), + AIMessage(content="") + ]) + + llm = Model.get_model( + model_id=cls.model_id, + model_kwargs=model_kwargs, + ) + chain = RunnablePassthrough.assign(chat_history=lambda x: cls.create_chat_history(x)) | tool_calling_template \ + | RunnableLambda(lambda x: print_llm_messages(f"Agent messages: {x.messages}") or x.messages ) \ + | llm | RunnableLambda(lambda message:cls.parse_function_calls_from_ai_message( + message + )) + return chain + + +class Claude21ToolCallingChain(Claude2ToolCallingChain): + model_id = LLMModelType.CLAUDE_21 + + +class 
ClaudeInstanceToolCallingChain(Claude2ToolCallingChain): + model_id = LLMModelType.CLAUDE_INSTANCE + + +class Claude3SonnetToolCallingChain(Claude2ToolCallingChain): + model_id = LLMModelType.CLAUDE_3_SONNET + + +class Claude3HaikuToolCallingChain(Claude2ToolCallingChain): + model_id = LLMModelType.CLAUDE_3_HAIKU + + +class Claude35SonnetToolCallingChain(Claude2ToolCallingChain): + model_id = "anthropic.claude-3-5-sonnet-20240620-v1:0" From cf479fbcc62437bf5d18e6976e596dec103c9825 Mon Sep 17 00:00:00 2001 From: NingLyu Date: Thu, 24 Oct 2024 07:13:23 +0000 Subject: [PATCH 023/110] fix: fix chat mode issue --- .../online/common_logic/common_utils/lambda_invoke_utils.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/source/lambda/online/common_logic/common_utils/lambda_invoke_utils.py b/source/lambda/online/common_logic/common_utils/lambda_invoke_utils.py index 5188480da..a03b0cf93 100644 --- a/source/lambda/online/common_logic/common_utils/lambda_invoke_utils.py +++ b/source/lambda/online/common_logic/common_utils/lambda_invoke_utils.py @@ -22,7 +22,8 @@ "agent": "Agent", "tools_choose_and_results_generation": "Tool Calling", "results_evaluation": "Result Evaluation", - "tool_execution": "Final Tool Result" + "tool_execution": "Final Tool Result", + "llm_direct_results_generation": "LLM Response" } class LAMBDA_INVOKE_MODE(enum.Enum): @@ -280,7 +281,7 @@ def wrapper(state: Dict[str, Any]) -> Dict[str, Any]: current_stream_use = state["stream"] ws_connection_id = state["ws_connection_id"] enable_trace = state["enable_trace"] - send_trace(f"\n\n ### {__FUNC_NAME_MAP[func.__name__]}\n\n", + send_trace(f"\n\n ### {__FUNC_NAME_MAP.get(func.__name__, func.__name__)}\n\n", current_stream_use, ws_connection_id, enable_trace) state['trace_infos'].append( f"Enter: {func.__name__}, time: {time.time()}") From b4500efc0fa1fab5801ea455d6905b0857fa3790 Mon Sep 17 00:00:00 2001 From: Cui <530051970@qq.com> Date: Thu, 24 Oct 2024 15:49:09 +0800 Subject: 
[PATCH 024/110] update test case --- .../Intelli-Agent-RESTful-API-prod-oas30.json | 2784 +++++++---------- .../rest_api/docs/Aicusapico2TwvXbhsTncy.md | 31 - .../docs/Aicusapico2TwvXbhsTncyConfig.md | 30 - .../rest_api/docs/Aicusapico2eyMRt6useQL.md | 30 + .../rest_api/docs/Aicusapico35klzY80ikPh.md | 30 - .../docs/Aicusapico35klzY80ikPhItemsInner.md | 33 - ...sapico35klzY80ikPhItemsInnerQAListInner.md | 31 - .../rest_api/docs/Aicusapico4LPAf103DGIi.md | 30 + .../docs/Aicusapico4LPAf103DGIiData.md | 31 + .../rest_api/docs/Aicusapico4rwMspzeBOe5.md | 31 - .../rest_api/docs/Aicusapico51RafCAYOxiZ.md | 30 - .../rest_api/docs/Aicusapico5ObTetko9oMO.md | 30 + .../docs/Aicusapico5ObTetko9oMOItemsInner.md | 34 + .../rest_api/docs/AicusapicoDPw375iu4xb1.md | 29 + .../rest_api/docs/AicusapicoHWyvBnB1QggI.md | 31 + .../docs/AicusapicoHWyvBnB1QggIConfig.md | 30 + ...md => AicusapicoHWyvBnB1QggIItemsInner.md} | 16 +- ...3FRwxBjhG.md => AicusapicoKUtg5hw5MQ23.md} | 16 +- .../rest_api/docs/AicusapicoNPq1TceemSd8.md | 31 + ...md => AicusapicoNPq1TceemSd8ItemsInner.md} | 16 +- .../rest_api/docs/AicusapicoOzq0ulOG1nrK.md | 31 + ... 
=> AicusapicoOzq0ulOG1nrKInputPayload.md} | 16 +- .../rest_api/docs/AicusapicoQjcoKzzZFI86.md | 30 - .../docs/AicusapicoQjcoKzzZFI86ItemsInner.md | 34 - ...ArXMRpSNs.md => AicusapicoTaAUp0RjHHQ0.md} | 16 +- .../rest_api/docs/AicusapicoUy1YBXiWJ5Aq.md | 32 - .../rest_api/docs/AicusapicoYa9VOrUQINzF.md | 32 + .../docs/AicusapicoYa9VOrUQINzFItemsInner.md | 32 + ...LNul8cwxa.md => AicusapicoZeNx832zHfgx.md} | 16 +- .../rest_api/docs/AicusapicoaOehYyqx8qlR.md | 32 + .../docs/AicusapicoaOehYyqx8qlRIndexIds.md | 31 + .../rest_api/docs/Aicusapicob9jxGQ8zv1AS.md | 31 - .../rest_api/docs/AicusapicobMN2pLK9AvE8.md | 32 - .../docs/AicusapicobMN2pLK9AvE8Index.md | 31 - ...129M65yKV.md => AicusapicodBETf4Zuz6WH.md} | 16 +- .../rest_api/docs/AicusapicohQbFv37cvtQS.md | 32 - .../docs/AicusapicohQbFv37cvtQSIndexIds.md | 31 - .../rest_api/docs/AicusapicoiXUam8N8Dh8l.md | 32 - .../docs/AicusapicoiXUam8N8Dh8lItemsInner.md | 32 - .../rest_api/docs/AicusapicoqhVwTerAVPQm.md | 30 + .../docs/AicusapicoqhVwTerAVPQmItemsInner.md | 33 + ...sapicoqhVwTerAVPQmItemsInnerQAListInner.md | 31 + .../rest_api/docs/Aicusapicor1Kt5C2mLnkm.md | 29 - .../rest_api/docs/Aicusapicou6VksROJ90h2.md | 32 + .../docs/Aicusapicou6VksROJ90h2Index.md | 31 + ...7t5vTA2ak.md => Aicusapicoyip3eUBUK13Z.md} | 16 +- .../biz_logic/rest_api/docs/DefaultApi.md | 384 +-- .../rest_api/openapi_client/__init__.py | 59 +- .../openapi_client/api/default_api.py | 429 ++- .../rest_api/openapi_client/api_client.py | 2 +- .../rest_api/openapi_client/configuration.py | 14 +- .../rest_api/openapi_client/exceptions.py | 2 +- .../openapi_client/models/__init__.py | 55 +- ...y_oxi_z.py => aicusapico2ey_mrt6use_ql.py} | 10 +- .../models/aicusapico4_lpaf103_dgii.py | 93 + ...aq.py => aicusapico4_lpaf103_dgii_data.py} | 20 +- ..._zfi86.py => aicusapico5_ob_tetko9o_mo.py} | 16 +- ... 
aicusapico5_ob_tetko9o_mo_items_inner.py} | 10 +- ...c2m_lnkm.py => aicusapico_dpw375iu4xb1.py} | 10 +- ..._as.py => aicusapico_h_wyv_bn_b1_qgg_i.py} | 22 +- ...=> aicusapico_h_wyv_bn_b1_qgg_i_config.py} | 10 +- ...cusapico_h_wyv_bn_b1_qgg_i_items_inner.py} | 10 +- ..._bjh_g.py => aicusapico_k_utg5hw5_mq23.py} | 10 +- ...s_tncy.py => aicusapico_npq1_tceem_sd8.py} | 22 +- ... aicusapico_npq1_tceem_sd8_items_inner.py} | 10 +- ...e_boe5.py => aicusapico_ozq0ul_og1nr_k.py} | 16 +- ...icusapico_ozq0ul_og1nr_k_input_payload.py} | 10 +- ...p_sns.py => aicusapico_ta_aup0_rj_hhq0.py} | 10 +- ..._dh8l.py => aicusapico_ya9_vor_uqinz_f.py} | 22 +- ...aicusapico_ya9_vor_uqinz_f_items_inner.py} | 10 +- ...l8cwxa.py => aicusapico_ze_nx832z_hfgx.py} | 10 +- ...cvt_qs.py => aicusapicoa_oeh_yyqx8ql_r.py} | 16 +- ...=> aicusapicoa_oeh_yyqx8ql_r_index_ids.py} | 10 +- ...65y_kv.py => aicusapicod_betf4_zuz6_wh.py} | 10 +- ...0ik_ph.py => aicusapicoqh_vw_ter_avpqm.py} | 16 +- ... aicusapicoqh_vw_ter_avpqm_items_inner.py} | 16 +- ...vw_ter_avpqm_items_inner_qa_list_inner.py} | 10 +- ...9_av_e8.py => aicusapicou6_vks_roj90h2.py} | 16 +- ...x.py => aicusapicou6_vks_roj90h2_index.py} | 10 +- ...v_ta2ak.py => aicusapicoyip3e_ubuk13_z.py} | 10 +- .../biz_logic/rest_api/openapi_client/rest.py | 2 +- api_test/sourceGen.sh | 9 +- api_test/test_case/test_01_rest_document.py | 132 +- 83 files changed, 2583 insertions(+), 3005 deletions(-) delete mode 100644 api_test/biz_logic/rest_api/docs/Aicusapico2TwvXbhsTncy.md delete mode 100644 api_test/biz_logic/rest_api/docs/Aicusapico2TwvXbhsTncyConfig.md create mode 100644 api_test/biz_logic/rest_api/docs/Aicusapico2eyMRt6useQL.md delete mode 100644 api_test/biz_logic/rest_api/docs/Aicusapico35klzY80ikPh.md delete mode 100644 api_test/biz_logic/rest_api/docs/Aicusapico35klzY80ikPhItemsInner.md delete mode 100644 api_test/biz_logic/rest_api/docs/Aicusapico35klzY80ikPhItemsInnerQAListInner.md create mode 100644 
api_test/biz_logic/rest_api/docs/Aicusapico4LPAf103DGIi.md create mode 100644 api_test/biz_logic/rest_api/docs/Aicusapico4LPAf103DGIiData.md delete mode 100644 api_test/biz_logic/rest_api/docs/Aicusapico4rwMspzeBOe5.md delete mode 100644 api_test/biz_logic/rest_api/docs/Aicusapico51RafCAYOxiZ.md create mode 100644 api_test/biz_logic/rest_api/docs/Aicusapico5ObTetko9oMO.md create mode 100644 api_test/biz_logic/rest_api/docs/Aicusapico5ObTetko9oMOItemsInner.md create mode 100644 api_test/biz_logic/rest_api/docs/AicusapicoDPw375iu4xb1.md create mode 100644 api_test/biz_logic/rest_api/docs/AicusapicoHWyvBnB1QggI.md create mode 100644 api_test/biz_logic/rest_api/docs/AicusapicoHWyvBnB1QggIConfig.md rename api_test/biz_logic/rest_api/docs/{Aicusapicob9jxGQ8zv1ASItemsInner.md => AicusapicoHWyvBnB1QggIItemsInner.md} (52%) rename api_test/biz_logic/rest_api/docs/{Aicusapicoh5w3FRwxBjhG.md => AicusapicoKUtg5hw5MQ23.md} (52%) create mode 100644 api_test/biz_logic/rest_api/docs/AicusapicoNPq1TceemSd8.md rename api_test/biz_logic/rest_api/docs/{Aicusapico2TwvXbhsTncyItemsInner.md => AicusapicoNPq1TceemSd8ItemsInner.md} (59%) create mode 100644 api_test/biz_logic/rest_api/docs/AicusapicoOzq0ulOG1nrK.md rename api_test/biz_logic/rest_api/docs/{Aicusapico4rwMspzeBOe5InputPayload.md => AicusapicoOzq0ulOG1nrKInputPayload.md} (51%) delete mode 100644 api_test/biz_logic/rest_api/docs/AicusapicoQjcoKzzZFI86.md delete mode 100644 api_test/biz_logic/rest_api/docs/AicusapicoQjcoKzzZFI86ItemsInner.md rename api_test/biz_logic/rest_api/docs/{AicusapicoseOArXMRpSNs.md => AicusapicoTaAUp0RjHHQ0.md} (55%) delete mode 100644 api_test/biz_logic/rest_api/docs/AicusapicoUy1YBXiWJ5Aq.md create mode 100644 api_test/biz_logic/rest_api/docs/AicusapicoYa9VOrUQINzF.md create mode 100644 api_test/biz_logic/rest_api/docs/AicusapicoYa9VOrUQINzFItemsInner.md rename api_test/biz_logic/rest_api/docs/{AicusapicoEOcLNul8cwxa.md => AicusapicoZeNx832zHfgx.md} (54%) create mode 100644 
api_test/biz_logic/rest_api/docs/AicusapicoaOehYyqx8qlR.md create mode 100644 api_test/biz_logic/rest_api/docs/AicusapicoaOehYyqx8qlRIndexIds.md delete mode 100644 api_test/biz_logic/rest_api/docs/Aicusapicob9jxGQ8zv1AS.md delete mode 100644 api_test/biz_logic/rest_api/docs/AicusapicobMN2pLK9AvE8.md delete mode 100644 api_test/biz_logic/rest_api/docs/AicusapicobMN2pLK9AvE8Index.md rename api_test/biz_logic/rest_api/docs/{AicusapicoCyd129M65yKV.md => AicusapicodBETf4Zuz6WH.md} (51%) delete mode 100644 api_test/biz_logic/rest_api/docs/AicusapicohQbFv37cvtQS.md delete mode 100644 api_test/biz_logic/rest_api/docs/AicusapicohQbFv37cvtQSIndexIds.md delete mode 100644 api_test/biz_logic/rest_api/docs/AicusapicoiXUam8N8Dh8l.md delete mode 100644 api_test/biz_logic/rest_api/docs/AicusapicoiXUam8N8Dh8lItemsInner.md create mode 100644 api_test/biz_logic/rest_api/docs/AicusapicoqhVwTerAVPQm.md create mode 100644 api_test/biz_logic/rest_api/docs/AicusapicoqhVwTerAVPQmItemsInner.md create mode 100644 api_test/biz_logic/rest_api/docs/AicusapicoqhVwTerAVPQmItemsInnerQAListInner.md delete mode 100644 api_test/biz_logic/rest_api/docs/Aicusapicor1Kt5C2mLnkm.md create mode 100644 api_test/biz_logic/rest_api/docs/Aicusapicou6VksROJ90h2.md create mode 100644 api_test/biz_logic/rest_api/docs/Aicusapicou6VksROJ90h2Index.md rename api_test/biz_logic/rest_api/docs/{Aicusapicoqew7t5vTA2ak.md => Aicusapicoyip3eUBUK13Z.md} (50%) rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapico51_raf_cay_oxi_z.py => aicusapico2ey_mrt6use_ql.py} (89%) create mode 100644 api_test/biz_logic/rest_api/openapi_client/models/aicusapico4_lpaf103_dgii.py rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapico_uy1_ybxi_wj5_aq.py => aicusapico4_lpaf103_dgii_data.py} (80%) rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapico_qjco_kzz_zfi86.py => aicusapico5_ob_tetko9o_mo.py} (84%) rename 
api_test/biz_logic/rest_api/openapi_client/models/{aicusapico_qjco_kzz_zfi86_items_inner.py => aicusapico5_ob_tetko9o_mo_items_inner.py} (90%) rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapicor1_kt5_c2m_lnkm.py => aicusapico_dpw375iu4xb1.py} (89%) rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapicob9jx_gq8zv1_as.py => aicusapico_h_wyv_bn_b1_qgg_i.py} (80%) rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapico2_twv_xbhs_tncy_config.py => aicusapico_h_wyv_bn_b1_qgg_i_config.py} (89%) rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapicob9jx_gq8zv1_as_items_inner.py => aicusapico_h_wyv_bn_b1_qgg_i_items_inner.py} (91%) rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapicoh5w3_f_rwx_bjh_g.py => aicusapico_k_utg5hw5_mq23.py} (90%) rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapico2_twv_xbhs_tncy.py => aicusapico_npq1_tceem_sd8.py} (80%) rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapico2_twv_xbhs_tncy_items_inner.py => aicusapico_npq1_tceem_sd8_items_inner.py} (93%) rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapico4rw_mspze_boe5.py => aicusapico_ozq0ul_og1nr_k.py} (84%) rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapico4rw_mspze_boe5_input_payload.py => aicusapico_ozq0ul_og1nr_k_input_payload.py} (91%) rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapicose_oar_xmrp_sns.py => aicusapico_ta_aup0_rj_hhq0.py} (91%) rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapicoi_x_uam8_n8_dh8l.py => aicusapico_ya9_vor_uqinz_f.py} (80%) rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapicoi_x_uam8_n8_dh8l_items_inner.py => aicusapico_ya9_vor_uqinz_f_items_inner.py} (90%) rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapico_eoc_l_nul8cwxa.py => aicusapico_ze_nx832z_hfgx.py} (90%) rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapicoh_qb_fv37cvt_qs.py => 
aicusapicoa_oeh_yyqx8ql_r.py} (85%) rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapicoh_qb_fv37cvt_qs_index_ids.py => aicusapicoa_oeh_yyqx8ql_r_index_ids.py} (89%) rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapico_cyd129_m65y_kv.py => aicusapicod_betf4_zuz6_wh.py} (89%) rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapico35klz_y80ik_ph.py => aicusapicoqh_vw_ter_avpqm.py} (84%) rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapico35klz_y80ik_ph_items_inner.py => aicusapicoqh_vw_ter_avpqm_items_inner.py} (84%) rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapico35klz_y80ik_ph_items_inner_qa_list_inner.py => aicusapicoqh_vw_ter_avpqm_items_inner_qa_list_inner.py} (89%) rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapicob_mn2p_lk9_av_e8.py => aicusapicou6_vks_roj90h2.py} (85%) rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapicob_mn2p_lk9_av_e8_index.py => aicusapicou6_vks_roj90h2_index.py} (89%) rename api_test/biz_logic/rest_api/openapi_client/models/{aicusapicoqew7t5v_ta2ak.py => aicusapicoyip3e_ubuk13_z.py} (89%) diff --git a/api_test/Intelli-Agent-RESTful-API-prod-oas30.json b/api_test/Intelli-Agent-RESTful-API-prod-oas30.json index 4111eda58..97d49e1ab 100644 --- a/api_test/Intelli-Agent-RESTful-API-prod-oas30.json +++ b/api_test/Intelli-Agent-RESTful-API-prod-oas30.json @@ -1,129 +1,84 @@ { - "openapi" : "3.0.1", + "swagger" : "2.0", "info" : { - "title" : "aics-api", "description" : "AI-Customer-Service - Core API", - "version" : "2024-10-21T08:32:58Z" + "version" : "2024-10-24T04:30:07Z", + "title" : "aics-api" }, - "servers" : [ { - "url" : "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/{basePath}", - "variables" : { - "basePath" : { - "default" : "prod" - } - } - } ], + "host" : "c63g9uqsze.execute-api.us-east-1.amazonaws.com", + "basePath" : "/prod", + "schemes" : [ "https" ], "paths" : { - "/chatbot-management/check-default-chatbot" 
: { - "get" : { - "responses" : { - "400" : { - "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "500" : { - "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "200" : { - "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - } - }, - "security" : [ { - "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] - } ] - }, + "/" : { "options" : { + "consumes" : [ "application/json" ], "responses" : { "204" : { "description" : "204 response", "headers" : { "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } + "type" : "string" } - }, - "content" : { } + } } } } }, - "/intention/execution-presigned-url" : { - "post" : { - "requestBody" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/aicusapicoCyd129M65yKV" - } + "/aos" : { + "get" : { + "produces" : [ "application/json" ], + "responses" : { + "200" : { + "description" : "200 response", + "schema" : { + "$ref" : "#/definitions/Empty" } }, - "required" : true - }, - "responses" : { "400" : { "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } }, "500" : { "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } - }, + } + }, + 
"security" : [ { + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] + } ] + }, + "post" : { + "produces" : [ "application/json" ], + "responses" : { "200" : { "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/aicusapicoUy1YBXiWJ5Aq" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" + } + }, + "400" : { + "description" : "400 response", + "schema" : { + "$ref" : "#/definitions/Empty" + } + }, + "500" : { + "description" : "500 response", + "schema" : { + "$ref" : "#/definitions/Empty" } } }, @@ -132,67 +87,48 @@ } ] }, "options" : { + "consumes" : [ "application/json" ], "responses" : { "204" : { "description" : "204 response", "headers" : { "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } + "type" : "string" } - }, - "content" : { } + } } } } }, - "/llm" : { + "/chat-history" : { "post" : { + "produces" : [ "application/json" ], "responses" : { + "200" : { + "description" : "200 response", + "schema" : { + "$ref" : "#/definitions/Empty" + } + }, "400" : { "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } }, "500" : { "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "200" : { - "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } } }, @@ -201,67 +137,48 @@ } ] }, "options" : { + "consumes" : [ 
"application/json" ], "responses" : { "204" : { "description" : "204 response", "headers" : { "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } + "type" : "string" } - }, - "content" : { } + } } } } }, - "/prompt-management/models" : { + "/chat-history/messages" : { "get" : { + "produces" : [ "application/json" ], "responses" : { + "200" : { + "description" : "200 response", + "schema" : { + "$ref" : "#/definitions/Empty" + } + }, "400" : { "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } }, "500" : { "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "200" : { - "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } } }, @@ -270,82 +187,48 @@ } ] }, "options" : { + "consumes" : [ "application/json" ], "responses" : { "204" : { "description" : "204 response", "headers" : { "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } + "type" : "string" } - }, - "content" : { } + } } } } }, - "/intention/executions/{executionId}" : { + "/chat-history/sessions" : { "get" : { - "parameters" : [ { - 
"name" : "intentionId", - "in" : "path", - "required" : true, - "schema" : { - "type" : "string" - } - }, { - "name" : "executionId", - "in" : "path", - "required" : true, - "schema" : { - "type" : "string" - } - } ], + "produces" : [ "application/json" ], "responses" : { + "200" : { + "description" : "200 response", + "schema" : { + "$ref" : "#/definitions/Empty" + } + }, "400" : { "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } }, "500" : { "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "200" : { - "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/aicusapico35klzY80ikPh" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } } }, @@ -354,88 +237,83 @@ } ] }, "options" : { - "parameters" : [ { - "name" : "executionId", - "in" : "path", - "required" : true, - "schema" : { - "type" : "string" + "consumes" : [ "application/json" ], + "responses" : { + "204" : { + "description" : "204 response", + "headers" : { + "Access-Control-Allow-Origin" : { + "type" : "string" + }, + "Access-Control-Allow-Methods" : { + "type" : "string" + }, + "Access-Control-Allow-Credentials" : { + "type" : "string" + }, + "Access-Control-Allow-Headers" : { + "type" : "string" + } + } } - } ], + } + } + }, + "/chatbot-management" : { + "options" : { + "consumes" : [ "application/json" ], "responses" : { "204" : { "description" : "204 response", "headers" : { "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Headers" : { - 
"schema" : { - "type" : "string" - } + "type" : "string" } - }, - "content" : { } + } } } } }, "/chatbot-management/chatbots" : { "get" : { + "produces" : [ "application/json" ], "parameters" : [ { "name" : "page_size", "in" : "query", - "schema" : { - "type" : "string" - } + "required" : false, + "type" : "string" }, { "name" : "max_items", "in" : "query", - "schema" : { - "type" : "string" - } + "required" : false, + "type" : "string" } ], "responses" : { + "200" : { + "description" : "200 response", + "schema" : { + "$ref" : "#/definitions/aicusapicoYa9VOrUQINzF" + } + }, "400" : { "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } }, "500" : { "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "200" : { - "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/aicusapicoiXUam8N8Dh8l" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } } }, @@ -444,45 +322,33 @@ } ] }, "post" : { - "requestBody" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/aicusapicobMN2pLK9AvE8" - } + "consumes" : [ "application/json" ], + "produces" : [ "application/json" ], + "parameters" : [ { + "in" : "body", + "name" : "aicusapicou6VksROJ90h2", + "required" : true, + "schema" : { + "$ref" : "#/definitions/aicusapicou6VksROJ90h2" + } + } ], + "responses" : { + "200" : { + "description" : "200 response", + "schema" : { + "$ref" : "#/definitions/aicusapicoaOehYyqx8qlR" } }, - "required" : true - }, - "responses" : { "400" : { "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } }, "500" : { "description" : "500 
response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "200" : { - "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/aicusapicohQbFv37cvtQS" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } } }, @@ -491,104 +357,48 @@ } ] }, "options" : { + "consumes" : [ "application/json" ], "responses" : { "204" : { "description" : "204 response", "headers" : { "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } + "type" : "string" } - }, - "content" : { } + } } } } }, - "/prompt-management/prompts" : { - "get" : { + "/chatbot-management/check-chatbot" : { + "post" : { + "produces" : [ "application/json" ], "responses" : { - "400" : { - "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "500" : { - "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, "200" : { "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } - } - }, - "security" : [ { - "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] - } ] - }, - "post" : { - "responses" : { + }, "400" : { "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } }, "500" : { "description" : "500 response", - "content" : { - 
"application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "200" : { - "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } } }, @@ -597,99 +407,98 @@ } ] }, "options" : { + "consumes" : [ "application/json" ], "responses" : { "204" : { "description" : "204 response", "headers" : { "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } + "type" : "string" } - }, - "content" : { } + } } } } }, - "/chatbot-management" : { + "/chatbot-management/check-default-chatbot" : { + "get" : { + "produces" : [ "application/json" ], + "responses" : { + "200" : { + "description" : "200 response", + "schema" : { + "$ref" : "#/definitions/Empty" + } + }, + "400" : { + "description" : "400 response", + "schema" : { + "$ref" : "#/definitions/Empty" + } + }, + "500" : { + "description" : "500 response", + "schema" : { + "$ref" : "#/definitions/Empty" + } + } + }, + "security" : [ { + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] + } ] + }, "options" : { + "consumes" : [ "application/json" ], "responses" : { "204" : { "description" : "204 response", "headers" : { "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } + "type" : "string" } - }, - "content" : { } + } } } } }, - "/chat-history" 
: { - "post" : { + "/chatbot-management/embeddings" : { + "get" : { + "produces" : [ "application/json" ], "responses" : { + "200" : { + "description" : "200 response", + "schema" : { + "$ref" : "#/definitions/Empty" + } + }, "400" : { "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } }, "500" : { "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "200" : { - "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } } }, @@ -698,67 +507,48 @@ } ] }, "options" : { + "consumes" : [ "application/json" ], "responses" : { "204" : { "description" : "204 response", "headers" : { "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } + "type" : "string" } - }, - "content" : { } + } } } } }, - "/chatbot-management/check-chatbot" : { + "/extract" : { "post" : { + "produces" : [ "application/json" ], "responses" : { + "200" : { + "description" : "200 response", + "schema" : { + "$ref" : "#/definitions/Empty" + } + }, "400" : { "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } }, "500" : { "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "200" : { - "description" : "200 response", - 
"content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } } }, @@ -767,136 +557,72 @@ } ] }, "options" : { + "consumes" : [ "application/json" ], "responses" : { "204" : { "description" : "204 response", "headers" : { "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } + "type" : "string" } - }, - "content" : { } + } } } } }, - "/knowledge-base" : { + "/intention" : { "options" : { + "consumes" : [ "application/json" ], "responses" : { "204" : { "description" : "204 response", "headers" : { "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } + "type" : "string" } - }, - "content" : { } + } } } } }, - "/aos" : { + "/intention/download-template" : { "get" : { + "produces" : [ "application/json" ], "responses" : { - "400" : { - "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "500" : { - "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, "200" : { "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } - } - }, - "security" : [ { - 
"aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] - } ] - }, - "post" : { - "responses" : { + }, "400" : { "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } }, "500" : { "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "200" : { - "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } } }, @@ -905,99 +631,57 @@ } ] }, "options" : { + "consumes" : [ "application/json" ], "responses" : { "204" : { "description" : "204 response", "headers" : { "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } + "type" : "string" } - }, - "content" : { } + } } } } }, - "/prompt-management" : { - "options" : { - "responses" : { - "204" : { - "description" : "204 response", - "headers" : { - "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } - }, - "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } - }, - "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } - }, - "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } - } - }, - "content" : { } + "/intention/execution-presigned-url" : { + "post" : { + "consumes" : [ "application/json" ], + "produces" : [ "application/json" ], + "parameters" : [ { + "in" : "body", + "name" : "aicusapico2eyMRt6useQL", + "required" : true, + "schema" : { + "$ref" : 
"#/definitions/aicusapico2eyMRt6useQL" } - } - } - }, - "/chat-history/sessions" : { - "get" : { + } ], "responses" : { + "200" : { + "description" : "200 response", + "schema" : { + "$ref" : "#/definitions/aicusapicoKUtg5hw5MQ23" + } + }, "400" : { "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } }, "500" : { "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "200" : { - "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } } }, @@ -1006,112 +690,59 @@ } ] }, "options" : { + "consumes" : [ "application/json" ], "responses" : { "204" : { "description" : "204 response", "headers" : { "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } + "type" : "string" } - }, - "content" : { } + } } } } }, - "/intention" : { - "options" : { - "responses" : { - "204" : { - "description" : "204 response", - "headers" : { - "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } - }, - "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } - }, - "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } - }, - "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } - } - }, - "content" : { } - } - } - } - }, - "/intention/executions" : { - "get" : { - "parameters" : [ { - "name" : "page_size", - "in" : "query", - "schema" : { - "type" : 
"string" - } - }, { - "name" : "max_items", - "in" : "query", - "schema" : { - "type" : "string" - } - } ], + "/intention/executions" : { + "get" : { + "produces" : [ "application/json" ], + "parameters" : [ { + "name" : "page_size", + "in" : "query", + "required" : false, + "type" : "string" + }, { + "name" : "max_items", + "in" : "query", + "required" : false, + "type" : "string" + } ], "responses" : { + "200" : { + "description" : "200 response", + "schema" : { + "$ref" : "#/definitions/aicusapicoHWyvBnB1QggI" + } + }, "400" : { "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } }, "500" : { "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "200" : { - "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/aicusapicob9jxGQ8zv1AS" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } } }, @@ -1120,45 +751,33 @@ } ] }, "post" : { - "requestBody" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/aicusapicoEOcLNul8cwxa" - } + "consumes" : [ "application/json" ], + "produces" : [ "application/json" ], + "parameters" : [ { + "in" : "body", + "name" : "aicusapicoZeNx832zHfgx", + "required" : true, + "schema" : { + "$ref" : "#/definitions/aicusapicoZeNx832zHfgx" + } + } ], + "responses" : { + "200" : { + "description" : "200 response", + "schema" : { + "$ref" : "#/definitions/aicusapicoOzq0ulOG1nrK" } }, - "required" : true - }, - "responses" : { "400" : { "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } }, "500" : { "description" : "500 response", - "content" : { - "application/json" : { - 
"schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "200" : { - "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/aicusapico4rwMspzeBOe5" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } } }, @@ -1167,35 +786,24 @@ } ] }, "delete" : { + "produces" : [ "application/json" ], "responses" : { + "200" : { + "description" : "200 response", + "schema" : { + "$ref" : "#/definitions/Empty" + } + }, "400" : { "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } }, "500" : { "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "200" : { - "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } } }, @@ -1204,127 +812,59 @@ } ] }, "options" : { + "consumes" : [ "application/json" ], "responses" : { "204" : { "description" : "204 response", "headers" : { "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } + "type" : "string" } - }, - "content" : { } + } } } } }, - "/knowledge-base/executions" : { + "/intention/executions/{executionId}" : { "get" : { + "produces" : [ "application/json" ], "parameters" : [ { - "name" : "page_size", - "in" : "query", - "schema" : { - "type" : "string" - } + "name" : "intentionId", + "in" : "path", + "required" : true, + "type" : "string" }, { - "name" : "max_items", - "in" : "query", - 
"schema" : { - "type" : "string" - } + "name" : "executionId", + "in" : "path", + "required" : true, + "type" : "string" } ], "responses" : { - "400" : { - "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "500" : { - "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, "200" : { "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/aicusapico2TwvXbhsTncy" - } - } - } - } - }, - "security" : [ { - "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] - } ] - }, - "post" : { - "requestBody" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/aicusapicoseOArXMRpSNs" - } + "schema" : { + "$ref" : "#/definitions/aicusapicoqhVwTerAVPQm" } }, - "required" : true - }, - "responses" : { "400" : { "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } }, "500" : { "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "200" : { - "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } } }, @@ -1332,46 +872,55 @@ "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] } ] }, - "delete" : { - "requestBody" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/aicusapicor1Kt5C2mLnkm" + "options" : { + "consumes" : [ "application/json" ], + "parameters" : [ { + "name" : "executionId", + "in" : "path", + "required" : true, + "type" : "string" + } ], + "responses" : { + "204" : { + "description" 
: "204 response", + "headers" : { + "Access-Control-Allow-Origin" : { + "type" : "string" + }, + "Access-Control-Allow-Methods" : { + "type" : "string" + }, + "Access-Control-Allow-Credentials" : { + "type" : "string" + }, + "Access-Control-Allow-Headers" : { + "type" : "string" } } - }, - "required" : true - }, + } + } + } + }, + "/intention/index-used-scan" : { + "post" : { + "produces" : [ "application/json" ], "responses" : { + "200" : { + "description" : "200 response", + "schema" : { + "$ref" : "#/definitions/Empty" + } + }, "400" : { "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } }, "500" : { "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "200" : { - "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/aicusapicoqew7t5vTA2ak" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } } }, @@ -1380,107 +929,83 @@ } ] }, "options" : { + "consumes" : [ "application/json" ], "responses" : { "204" : { "description" : "204 response", "headers" : { "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } + "type" : "string" } - }, - "content" : { } + } } } } }, - "/" : { + "/knowledge-base" : { "options" : { + "consumes" : [ "application/json" ], "responses" : { "204" : { "description" : "204 response", "headers" : { "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Methods" : { - "schema" : { 
- "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } + "type" : "string" } - }, - "content" : { } + } } } } }, - "/knowledge-base/executions/{executionId}" : { + "/knowledge-base/executions" : { "get" : { + "produces" : [ "application/json" ], "parameters" : [ { - "name" : "executionId", - "in" : "path", - "required" : true, - "schema" : { - "type" : "string" - } + "name" : "page_size", + "in" : "query", + "required" : false, + "type" : "string" + }, { + "name" : "max_items", + "in" : "query", + "required" : false, + "type" : "string" } ], "responses" : { + "200" : { + "description" : "200 response", + "schema" : { + "$ref" : "#/definitions/aicusapicoNPq1TceemSd8" + } + }, "400" : { "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } }, "500" : { "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "200" : { - "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/aicusapicoQjcoKzzZFI86" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } } }, @@ -1488,76 +1013,69 @@ "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] } ] }, - "options" : { + "post" : { + "consumes" : [ "application/json" ], + "produces" : [ "application/json" ], "parameters" : [ { - "name" : "executionId", - "in" : "path", + "in" : "body", + "name" : "aicusapicoTaAUp0RjHHQ0", "required" : true, "schema" : { - "type" : "string" + "$ref" : "#/definitions/aicusapicoTaAUp0RjHHQ0" } } ], "responses" : { - "204" : { - "description" : "204 response", - "headers" : { - "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - 
} - }, - "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } - }, - "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } - }, - "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } - } - }, - "content" : { } - } - } - } - }, - "/intention/download-template" : { - "get" : { - "responses" : { + "200" : { + "description" : "200 response", + "schema" : { + "$ref" : "#/definitions/Empty" + } + }, "400" : { "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } }, "500" : { "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } - }, + } + }, + "security" : [ { + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] + } ] + }, + "delete" : { + "consumes" : [ "application/json" ], + "produces" : [ "application/json" ], + "parameters" : [ { + "in" : "body", + "name" : "aicusapicoDPw375iu4xb1", + "required" : true, + "schema" : { + "$ref" : "#/definitions/aicusapicoDPw375iu4xb1" + } + } ], + "responses" : { "200" : { "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/aicusapicoyip3eUBUK13Z" + } + }, + "400" : { + "description" : "400 response", + "schema" : { + "$ref" : "#/definitions/Empty" + } + }, + "500" : { + "description" : "500 response", + "schema" : { + "$ref" : "#/definitions/Empty" } } }, @@ -1566,67 +1084,54 @@ } ] }, "options" : { + "consumes" : [ "application/json" ], "responses" : { "204" : { "description" : "204 response", "headers" : { "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } + 
"type" : "string" }, "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } + "type" : "string" } - }, - "content" : { } + } } } } }, - "/prompt-management/scenes" : { + "/knowledge-base/executions/{executionId}" : { "get" : { + "produces" : [ "application/json" ], + "parameters" : [ { + "name" : "executionId", + "in" : "path", + "required" : true, + "type" : "string" + } ], "responses" : { + "200" : { + "description" : "200 response", + "schema" : { + "$ref" : "#/definitions/aicusapico5ObTetko9oMO" + } + }, "400" : { "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } }, "500" : { "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "200" : { - "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } } }, @@ -1635,67 +1140,63 @@ } ] }, "options" : { + "consumes" : [ "application/json" ], + "parameters" : [ { + "name" : "executionId", + "in" : "path", + "required" : true, + "type" : "string" + } ], "responses" : { "204" : { "description" : "204 response", "headers" : { "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } + "type" : "string" } - }, - "content" : { } + } } } } }, - "/intention/index-used-scan" : { + "/knowledge-base/kb-presigned-url" : { "post" : { + "consumes" : [ 
"application/json" ], + "produces" : [ "application/json" ], + "parameters" : [ { + "in" : "body", + "name" : "aicusapicodBETf4Zuz6WH", + "required" : true, + "schema" : { + "$ref" : "#/definitions/aicusapicodBETf4Zuz6WH" + } + } ], "responses" : { + "200" : { + "description" : "200 response", + "schema" : { + "$ref" : "#/definitions/aicusapico4LPAf103DGIi" + } + }, "400" : { "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } }, "500" : { "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "200" : { - "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } } }, @@ -1704,67 +1205,48 @@ } ] }, "options" : { + "consumes" : [ "application/json" ], "responses" : { "204" : { "description" : "204 response", "headers" : { "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } + "type" : "string" } - }, - "content" : { } + } } } } }, - "/extract" : { + "/llm" : { "post" : { + "produces" : [ "application/json" ], "responses" : { + "200" : { + "description" : "200 response", + "schema" : { + "$ref" : "#/definitions/Empty" + } + }, "400" : { "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } }, "500" : { "description" : "500 response", - "content" : { - "application/json" : { - 
"schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "200" : { - "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } } }, @@ -1773,67 +1255,72 @@ } ] }, "options" : { + "consumes" : [ "application/json" ], "responses" : { "204" : { "description" : "204 response", "headers" : { "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } + "type" : "string" } - }, - "content" : { } + } } } } }, - "/chat-history/messages" : { + "/prompt-management" : { + "options" : { + "consumes" : [ "application/json" ], + "responses" : { + "204" : { + "description" : "204 response", + "headers" : { + "Access-Control-Allow-Origin" : { + "type" : "string" + }, + "Access-Control-Allow-Methods" : { + "type" : "string" + }, + "Access-Control-Allow-Credentials" : { + "type" : "string" + }, + "Access-Control-Allow-Headers" : { + "type" : "string" + } + } + } + } + } + }, + "/prompt-management/models" : { "get" : { + "produces" : [ "application/json" ], "responses" : { + "200" : { + "description" : "200 response", + "schema" : { + "$ref" : "#/definitions/Empty" + } + }, "400" : { "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } }, "500" : { "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "200" : { - "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : 
"#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } } }, @@ -1842,67 +1329,74 @@ } ] }, "options" : { + "consumes" : [ "application/json" ], "responses" : { "204" : { "description" : "204 response", "headers" : { "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } + "type" : "string" } - }, - "content" : { } + } } } } }, - "/chatbot-management/embeddings" : { + "/prompt-management/prompts" : { "get" : { + "produces" : [ "application/json" ], "responses" : { + "200" : { + "description" : "200 response", + "schema" : { + "$ref" : "#/definitions/Empty" + } + }, "400" : { "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } }, "500" : { "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } - }, + } + }, + "security" : [ { + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : [ ] + } ] + }, + "post" : { + "produces" : [ "application/json" ], + "responses" : { "200" : { "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" + } + }, + "400" : { + "description" : "400 response", + "schema" : { + "$ref" : "#/definitions/Empty" + } + }, + "500" : { + "description" : "500 response", + "schema" : { + "$ref" : "#/definitions/Empty" } } }, @@ -1911,77 +1405,48 @@ } ] }, "options" : { + "consumes" : [ "application/json" ], "responses" : { "204" 
: { "description" : "204 response", "headers" : { "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Headers" : { - "schema" : { - "type" : "string" - } + "type" : "string" } - }, - "content" : { } + } } } } }, - "/knowledge-base/kb-presigned-url" : { - "post" : { - "requestBody" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/aicusapico51RafCAYOxiZ" - } + "/prompt-management/scenes" : { + "get" : { + "produces" : [ "application/json" ], + "responses" : { + "200" : { + "description" : "200 response", + "schema" : { + "$ref" : "#/definitions/Empty" } }, - "required" : true - }, - "responses" : { "400" : { "description" : "400 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } }, "500" : { "description" : "500 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/Empty" - } - } - } - }, - "200" : { - "description" : "200 response", - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/aicusapicoh5w3FRwxBjhG" - } - } + "schema" : { + "$ref" : "#/definitions/Empty" } } }, @@ -1990,549 +1455,544 @@ } ] }, "options" : { + "consumes" : [ "application/json" ], "responses" : { "204" : { "description" : "204 response", "headers" : { "Access-Control-Allow-Origin" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Methods" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Credentials" : { - "schema" : { - "type" : "string" - } + "type" : "string" }, "Access-Control-Allow-Headers" : { - "schema" : { - "type" 
: "string" - } + "type" : "string" } - }, - "content" : { } + } } } } } }, - "components" : { - "schemas" : { - "aicusapico2TwvXbhsTncy" : { - "title" : "ResponsePayload", - "type" : "object", - "properties" : { - "Config" : { - "type" : "object", - "properties" : { - "PageSize" : { - "type" : "integer" - }, - "MaxItems" : { - "type" : "integer" - } - } - }, - "Items" : { - "type" : "array", - "items" : { - "required" : [ "chatbotId", "createTime", "embeddingModelType", "executionId", "executionStatus", "groupName", "indexId", "indexType", "offline", "operationType", "qaEnhance", "s3Bucket", "s3Prefix", "sfnExecutionId", "uiStatus" ], - "type" : "object", - "properties" : { - "executionStatus" : { - "type" : "string" - }, - "s3Prefix" : { - "type" : "string" - }, - "uiStatus" : { - "type" : "string" - }, - "s3Bucket" : { - "type" : "string" - }, - "qaEnhance" : { - "type" : "string" - }, - "sfnExecutionId" : { - "type" : "string" - }, - "embeddingModelType" : { - "type" : "string" - }, - "offline" : { - "type" : "string" - }, - "executionId" : { - "type" : "string" - }, - "groupName" : { - "type" : "string" - }, - "chatbotId" : { - "type" : "string" - }, - "indexType" : { - "type" : "string" - }, - "createTime" : { - "type" : "string" - }, - "indexId" : { - "type" : "string" - }, - "operationType" : { - "type" : "string" - } - } + "securityDefinitions" : { + "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : { + "type" : "apiKey", + "name" : "Authorization", + "in" : "header", + "x-amazon-apigateway-authtype" : "custom" + } + }, + "definitions" : { + "aicusapicou6VksROJ90h2" : { + "type" : "object", + "required" : [ "chatbotId", "index", "modelId", "modelName" ], + "properties" : { + "modelName" : { + "type" : "string" + }, + "chatbotId" : { + "type" : "string" + }, + "modelId" : { + "type" : "string" + }, + "index" : { + "type" : "object", + "properties" : { + "qq" : { + "type" : "string" + }, + "qd" : { + "type" : "string" + }, + "intention" : { + "type" : 
"string" } }, - "Count" : { - "type" : "integer" - } + "required" : [ "intention", "qd", "qq" ] } }, - "aicusapicoseOArXMRpSNs" : { - "title" : "PostPayload", - "required" : [ "chatbotId", "indexType", "offline", "operationType", "qaEnhance", "s3Bucket", "s3Prefix" ], - "type" : "object", - "properties" : { - "offline" : { - "type" : "string" - }, - "chatbotId" : { - "type" : "string" - }, - "indexType" : { - "type" : "string" - }, - "operationType" : { - "type" : "string" - }, - "s3Prefix" : { - "type" : "string" - }, - "s3Bucket" : { - "type" : "string" - }, - "qaEnhance" : { + "title" : "PostPayload" + }, + "aicusapicoZeNx832zHfgx" : { + "type" : "object", + "required" : [ "chatbotId", "index", "model", "s3Bucket", "s3Prefix" ], + "properties" : { + "chatbotId" : { + "type" : "string" + }, + "index" : { + "type" : "string" + }, + "model" : { + "type" : "string" + }, + "s3Prefix" : { + "type" : "string" + }, + "s3Bucket" : { + "type" : "string" + } + }, + "title" : "PostPayload" + }, + "aicusapicoDPw375iu4xb1" : { + "type" : "object", + "required" : [ "executionId" ], + "properties" : { + "executionId" : { + "type" : "array", + "items" : { "type" : "string" } } }, - "aicusapicobMN2pLK9AvE8" : { - "title" : "PostPayload", - "required" : [ "chatbotId", "index", "modelId", "modelName" ], - "type" : "object", - "properties" : { - "modelName" : { - "type" : "string" - }, - "chatbotId" : { - "type" : "string" - }, - "modelId" : { - "type" : "string" - }, - "index" : { - "required" : [ "intention", "qd", "qq" ], + "title" : "PostPayload" + }, + "aicusapicoHWyvBnB1QggI" : { + "type" : "object", + "properties" : { + "Config" : { + "type" : "object", + "properties" : { + "PageSize" : { + "type" : "integer" + }, + "MaxItems" : { + "type" : "integer" + } + } + }, + "Items" : { + "type" : "array", + "items" : { "type" : "object", "properties" : { - "qq" : { + "executionId" : { + "type" : "string" + }, + "fileName" : { + "type" : "string" + }, + "createBy" : { + "type" : 
"string" + }, + "chatbotId" : { + "type" : "string" + }, + "createTime" : { + "type" : "string" + }, + "executionStatus" : { + "type" : "string" + }, + "index" : { "type" : "string" }, - "qd" : { + "model" : { + "type" : "string" + }, + "details" : { "type" : "string" }, - "intention" : { + "tag" : { "type" : "string" } - } + }, + "required" : [ "chatbotId", "createBy", "createTime", "details", "executionId", "executionStatus", "fileName", "index", "model", "tag" ] } + }, + "Count" : { + "type" : "integer" } }, - "aicusapicoEOcLNul8cwxa" : { - "title" : "PostPayload", - "required" : [ "chatbotId", "index", "model", "s3Bucket", "s3Prefix" ], - "type" : "object", - "properties" : { - "chatbotId" : { - "type" : "string" - }, - "index" : { - "type" : "string" - }, - "model" : { - "type" : "string" - }, - "s3Prefix" : { - "type" : "string" - }, - "s3Bucket" : { - "type" : "string" + "title" : "ResponsePayload" + }, + "aicusapicoqhVwTerAVPQm" : { + "type" : "object", + "properties" : { + "Items" : { + "type" : "array", + "items" : { + "type" : "object", + "properties" : { + "s3Path" : { + "type" : "string" + }, + "createTime" : { + "type" : "string" + }, + "s3Prefix" : { + "type" : "string" + }, + "QAList" : { + "type" : "array", + "items" : { + "type" : "object", + "properties" : { + "question" : { + "type" : "string" + }, + "kwargs" : { + "type" : "string" + }, + "intention" : { + "type" : "string" + } + } + } + }, + "status" : { + "type" : "string" + } + }, + "required" : [ "createTime", "s3Path", "s3Prefix", "status" ] } + }, + "Count" : { + "type" : "integer" } }, - "aicusapicoUy1YBXiWJ5Aq" : { - "title" : "ResponsePayload", - "type" : "object", - "properties" : { - "data" : { - "type" : "string" - }, - "message" : { - "type" : "string" - }, - "s3Prefix" : { - "type" : "string" - }, - "s3Bucket" : { - "type" : "string" - } + "title" : "ResponsePayload" + }, + "aicusapicodBETf4Zuz6WH" : { + "type" : "object", + "required" : [ "content_type", "file_name" ], + 
"properties" : { + "content_type" : { + "type" : "string" + }, + "file_name" : { + "type" : "string" } }, - "aicusapicoh5w3FRwxBjhG" : { - "title" : "ResponsePayload", - "type" : "object", - "properties" : { - "data" : { - "type" : "string" - }, - "message" : { - "type" : "string" - }, - "s3Prefix" : { - "type" : "string" - }, - "s3Bucket" : { + "title" : "PostPayload" + }, + "aicusapicoYa9VOrUQINzF" : { + "type" : "object", + "properties" : { + "chatbot_ids" : { + "type" : "array", + "items" : { "type" : "string" } - } - }, - "aicusapicor1Kt5C2mLnkm" : { - "title" : "PostPayload", - "required" : [ "executionId" ], - "type" : "object", - "properties" : { - "executionId" : { - "type" : "array", - "items" : { - "type" : "string" + }, + "Config" : { + "type" : "object", + "properties" : { + "PageSize" : { + "type" : "integer" + }, + "MaxItems" : { + "type" : "integer" } } + }, + "Items" : { + "type" : "array", + "items" : { + "type" : "object", + "properties" : { + "ChatbotId" : { + "type" : "string" + }, + "ModelName" : { + "type" : "string" + }, + "LastModifiedTime" : { + "type" : "string" + }, + "ModelId" : { + "type" : "string" + } + }, + "required" : [ "ChatbotId", "LastModifiedTime", "ModelId", "ModelName" ] + } + }, + "Count" : { + "type" : "integer" } }, - "aicusapico4rwMspzeBOe5" : { - "title" : "ResponsePayload", - "type" : "object", - "properties" : { - "result" : { - "type" : "string" - }, - "execution_id" : { - "type" : "string" - }, - "input_payload" : { + "title" : "ResponsePayload" + }, + "aicusapicoTaAUp0RjHHQ0" : { + "type" : "object", + "required" : [ "chatbotId", "indexType", "offline", "operationType", "qaEnhance", "s3Bucket", "s3Prefix" ], + "properties" : { + "offline" : { + "type" : "string" + }, + "chatbotId" : { + "type" : "string" + }, + "indexType" : { + "type" : "string" + }, + "operationType" : { + "type" : "string" + }, + "s3Prefix" : { + "type" : "string" + }, + "s3Bucket" : { + "type" : "string" + }, + "qaEnhance" : { + "type" : 
"string" + } + }, + "title" : "PostPayload" + }, + "aicusapico5ObTetko9oMO" : { + "type" : "object", + "properties" : { + "Items" : { + "type" : "array", + "items" : { "type" : "object", "properties" : { - "chatbotId" : { + "executionId" : { "type" : "string" }, - "groupName" : { + "s3Path" : { "type" : "string" }, - "tableItemId" : { + "createTime" : { "type" : "string" }, - "fieldName" : { + "s3Prefix" : { "type" : "string" }, - "index" : { + "s3Bucket" : { "type" : "string" }, - "model" : { + "status" : { "type" : "string" } - } + }, + "required" : [ "createTime", "executionId", "s3Bucket", "s3Path", "s3Prefix", "status" ] } + }, + "Count" : { + "type" : "integer" } }, - "aicusapicoiXUam8N8Dh8l" : { - "title" : "ResponsePayload", - "type" : "object", - "properties" : { - "chatbot_ids" : { - "type" : "array", - "items" : { + "title" : "ResponsePayload" + }, + "aicusapicoaOehYyqx8qlR" : { + "type" : "object", + "properties" : { + "chatbotId" : { + "type" : "string" + }, + "groupName" : { + "type" : "string" + }, + "indexIds" : { + "type" : "object", + "properties" : { + "qq" : { + "type" : "string" + }, + "qd" : { + "type" : "string" + }, + "intention" : { "type" : "string" } - }, - "Config" : { - "type" : "object", - "properties" : { - "PageSize" : { - "type" : "integer" - }, - "MaxItems" : { - "type" : "integer" - } - } - }, - "Items" : { - "type" : "array", - "items" : { - "required" : [ "ChatbotId", "LastModifiedTime", "ModelId", "ModelName" ], - "type" : "object", - "properties" : { - "ChatbotId" : { - "type" : "string" - }, - "ModelName" : { - "type" : "string" - }, - "LastModifiedTime" : { - "type" : "string" - }, - "ModelId" : { - "type" : "string" - } - } - } - }, - "Count" : { - "type" : "integer" } + }, + "Message" : { + "type" : "string" } }, - "aicusapicob9jxGQ8zv1AS" : { - "title" : "ResponsePayload", - "type" : "object", - "properties" : { - "Config" : { - "type" : "object", - "properties" : { - "PageSize" : { - "type" : "integer" - }, - "MaxItems" 
: { - "type" : "integer" - } - } - }, - "Items" : { - "type" : "array", - "items" : { - "required" : [ "chatbotId", "createBy", "createTime", "details", "executionId", "executionStatus", "fileName", "index", "model", "tag" ], - "type" : "object", - "properties" : { - "executionId" : { - "type" : "string" - }, - "fileName" : { - "type" : "string" - }, - "createBy" : { - "type" : "string" - }, - "chatbotId" : { - "type" : "string" - }, - "createTime" : { - "type" : "string" - }, - "executionStatus" : { - "type" : "string" - }, - "index" : { - "type" : "string" - }, - "model" : { - "type" : "string" - }, - "details" : { - "type" : "string" - }, - "tag" : { - "type" : "string" - } - } + "title" : "ResponsePayload" + }, + "Empty" : { + "type" : "object", + "title" : "Empty Schema" + }, + "aicusapico4LPAf103DGIi" : { + "type" : "object", + "properties" : { + "data" : { + "type" : "object", + "properties" : { + "s3Prefix" : { + "type" : "string" + }, + "s3Bucket" : { + "type" : "string" + }, + "url" : { + "type" : "string" } - }, - "Count" : { - "type" : "integer" } + }, + "message" : { + "type" : "string" } }, - "Empty" : { - "title" : "Empty Schema", - "type" : "object" - }, - "aicusapicoCyd129M65yKV" : { - "title" : "PostPayload", - "required" : [ "content_type", "file_name" ], - "type" : "object", - "properties" : { - "content_type" : { - "type" : "string" - }, - "file_name" : { - "type" : "string" + "title" : "ResponsePayload" + }, + "aicusapicoNPq1TceemSd8" : { + "type" : "object", + "properties" : { + "Config" : { + "type" : "object", + "properties" : { + "PageSize" : { + "type" : "integer" + }, + "MaxItems" : { + "type" : "integer" + } } - } - }, - "aicusapicohQbFv37cvtQS" : { - "title" : "ResponsePayload", - "type" : "object", - "properties" : { - "chatbotId" : { - "type" : "string" - }, - "groupName" : { - "type" : "string" - }, - "indexIds" : { + }, + "Items" : { + "type" : "array", + "items" : { "type" : "object", "properties" : { - "qq" : { + 
"executionStatus" : { "type" : "string" }, - "qd" : { + "s3Prefix" : { "type" : "string" }, - "intention" : { + "uiStatus" : { + "type" : "string" + }, + "s3Bucket" : { + "type" : "string" + }, + "qaEnhance" : { + "type" : "string" + }, + "sfnExecutionId" : { + "type" : "string" + }, + "embeddingModelType" : { + "type" : "string" + }, + "offline" : { + "type" : "string" + }, + "executionId" : { + "type" : "string" + }, + "groupName" : { + "type" : "string" + }, + "chatbotId" : { + "type" : "string" + }, + "indexType" : { + "type" : "string" + }, + "createTime" : { + "type" : "string" + }, + "indexId" : { + "type" : "string" + }, + "operationType" : { "type" : "string" } - } - }, - "Message" : { - "type" : "string" - } - } - }, - "aicusapicoQjcoKzzZFI86" : { - "title" : "ResponsePayload", - "type" : "object", - "properties" : { - "Items" : { - "type" : "array", - "items" : { - "required" : [ "createTime", "executionId", "s3Bucket", "s3Path", "s3Prefix", "status" ], - "type" : "object", - "properties" : { - "executionId" : { - "type" : "string" - }, - "s3Path" : { - "type" : "string" - }, - "createTime" : { - "type" : "string" - }, - "s3Prefix" : { - "type" : "string" - }, - "s3Bucket" : { - "type" : "string" - }, - "status" : { - "type" : "string" - } - } - } - }, - "Count" : { - "type" : "integer" + }, + "required" : [ "chatbotId", "createTime", "embeddingModelType", "executionId", "executionStatus", "groupName", "indexId", "indexType", "offline", "operationType", "qaEnhance", "s3Bucket", "s3Prefix", "sfnExecutionId", "uiStatus" ] } + }, + "Count" : { + "type" : "integer" } }, - "aicusapicoqew7t5vTA2ak" : { - "title" : "ResponsePayload", - "type" : "object", - "properties" : { - "data" : { - "type" : "array", - "items" : { + "title" : "ResponsePayload" + }, + "aicusapicoOzq0ulOG1nrK" : { + "type" : "object", + "properties" : { + "result" : { + "type" : "string" + }, + "execution_id" : { + "type" : "string" + }, + "input_payload" : { + "type" : "object", + 
"properties" : { + "chatbotId" : { + "type" : "string" + }, + "groupName" : { + "type" : "string" + }, + "tableItemId" : { + "type" : "string" + }, + "fieldName" : { + "type" : "string" + }, + "index" : { + "type" : "string" + }, + "model" : { "type" : "string" } - }, - "message" : { - "type" : "string" } } }, - "aicusapico35klzY80ikPh" : { - "title" : "ResponsePayload", - "type" : "object", - "properties" : { - "Items" : { - "type" : "array", - "items" : { - "required" : [ "createTime", "s3Path", "s3Prefix", "status" ], - "type" : "object", - "properties" : { - "s3Path" : { - "type" : "string" - }, - "createTime" : { - "type" : "string" - }, - "s3Prefix" : { - "type" : "string" - }, - "QAList" : { - "type" : "array", - "items" : { - "type" : "object", - "properties" : { - "question" : { - "type" : "string" - }, - "kwargs" : { - "type" : "string" - }, - "intention" : { - "type" : "string" - } - } - } - }, - "status" : { - "type" : "string" - } - } - } - }, - "Count" : { - "type" : "integer" - } + "title" : "ResponsePayload" + }, + "aicusapicoKUtg5hw5MQ23" : { + "type" : "object", + "properties" : { + "data" : { + "type" : "string" + }, + "message" : { + "type" : "string" + }, + "s3Prefix" : { + "type" : "string" + }, + "s3Bucket" : { + "type" : "string" } }, - "aicusapico51RafCAYOxiZ" : { - "title" : "PostPayload", - "required" : [ "content_type", "file_name" ], - "type" : "object", - "properties" : { - "content_type" : { - "type" : "string" - }, - "file_name" : { + "title" : "ResponsePayload" + }, + "aicusapicoyip3eUBUK13Z" : { + "type" : "object", + "properties" : { + "data" : { + "type" : "array", + "items" : { "type" : "string" } + }, + "message" : { + "type" : "string" } - } + }, + "title" : "ResponsePayload" }, - "securitySchemes" : { - "aicustomerserviceapiconstructApiAuthorizerEB0B49FC" : { - "type" : "apiKey", - "name" : "Authorization", - "in" : "header", - "x-amazon-apigateway-authtype" : "custom" - } + "aicusapico2eyMRt6useQL" : { + "type" : "object", + 
"required" : [ "content_type", "file_name" ], + "properties" : { + "content_type" : { + "type" : "string" + }, + "file_name" : { + "type" : "string" + } + }, + "title" : "PostPayload" } } } \ No newline at end of file diff --git a/api_test/biz_logic/rest_api/docs/Aicusapico2TwvXbhsTncy.md b/api_test/biz_logic/rest_api/docs/Aicusapico2TwvXbhsTncy.md deleted file mode 100644 index ca53bff91..000000000 --- a/api_test/biz_logic/rest_api/docs/Aicusapico2TwvXbhsTncy.md +++ /dev/null @@ -1,31 +0,0 @@ -# Aicusapico2TwvXbhsTncy - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**config** | [**Aicusapico2TwvXbhsTncyConfig**](Aicusapico2TwvXbhsTncyConfig.md) | | [optional] -**items** | [**List[Aicusapico2TwvXbhsTncyItemsInner]**](Aicusapico2TwvXbhsTncyItemsInner.md) | | [optional] -**count** | **int** | | [optional] - -## Example - -```python -from openapi_client.models.aicusapico2_twv_xbhs_tncy import Aicusapico2TwvXbhsTncy - -# TODO update the JSON string below -json = "{}" -# create an instance of Aicusapico2TwvXbhsTncy from a JSON string -aicusapico2_twv_xbhs_tncy_instance = Aicusapico2TwvXbhsTncy.from_json(json) -# print the JSON string representation of the object -print(Aicusapico2TwvXbhsTncy.to_json()) - -# convert the object into a dict -aicusapico2_twv_xbhs_tncy_dict = aicusapico2_twv_xbhs_tncy_instance.to_dict() -# create an instance of Aicusapico2TwvXbhsTncy from a dict -aicusapico2_twv_xbhs_tncy_from_dict = Aicusapico2TwvXbhsTncy.from_dict(aicusapico2_twv_xbhs_tncy_dict) -``` -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/api_test/biz_logic/rest_api/docs/Aicusapico2TwvXbhsTncyConfig.md b/api_test/biz_logic/rest_api/docs/Aicusapico2TwvXbhsTncyConfig.md deleted file mode 100644 index ca597a5cf..000000000 --- 
a/api_test/biz_logic/rest_api/docs/Aicusapico2TwvXbhsTncyConfig.md +++ /dev/null @@ -1,30 +0,0 @@ -# Aicusapico2TwvXbhsTncyConfig - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**page_size** | **int** | | [optional] -**max_items** | **int** | | [optional] - -## Example - -```python -from openapi_client.models.aicusapico2_twv_xbhs_tncy_config import Aicusapico2TwvXbhsTncyConfig - -# TODO update the JSON string below -json = "{}" -# create an instance of Aicusapico2TwvXbhsTncyConfig from a JSON string -aicusapico2_twv_xbhs_tncy_config_instance = Aicusapico2TwvXbhsTncyConfig.from_json(json) -# print the JSON string representation of the object -print(Aicusapico2TwvXbhsTncyConfig.to_json()) - -# convert the object into a dict -aicusapico2_twv_xbhs_tncy_config_dict = aicusapico2_twv_xbhs_tncy_config_instance.to_dict() -# create an instance of Aicusapico2TwvXbhsTncyConfig from a dict -aicusapico2_twv_xbhs_tncy_config_from_dict = Aicusapico2TwvXbhsTncyConfig.from_dict(aicusapico2_twv_xbhs_tncy_config_dict) -``` -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/api_test/biz_logic/rest_api/docs/Aicusapico2eyMRt6useQL.md b/api_test/biz_logic/rest_api/docs/Aicusapico2eyMRt6useQL.md new file mode 100644 index 000000000..1e22752e8 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/Aicusapico2eyMRt6useQL.md @@ -0,0 +1,30 @@ +# Aicusapico2eyMRt6useQL + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**content_type** | **str** | | +**file_name** | **str** | | + +## Example + +```python +from openapi_client.models.aicusapico2ey_mrt6use_ql import Aicusapico2eyMRt6useQL + +# TODO update the JSON string below +json = "{}" +# create an instance of Aicusapico2eyMRt6useQL from a JSON string 
+aicusapico2ey_mrt6use_ql_instance = Aicusapico2eyMRt6useQL.from_json(json) +# print the JSON string representation of the object +print(Aicusapico2eyMRt6useQL.to_json()) + +# convert the object into a dict +aicusapico2ey_mrt6use_ql_dict = aicusapico2ey_mrt6use_ql_instance.to_dict() +# create an instance of Aicusapico2eyMRt6useQL from a dict +aicusapico2ey_mrt6use_ql_from_dict = Aicusapico2eyMRt6useQL.from_dict(aicusapico2ey_mrt6use_ql_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/Aicusapico35klzY80ikPh.md b/api_test/biz_logic/rest_api/docs/Aicusapico35klzY80ikPh.md deleted file mode 100644 index f58cdc133..000000000 --- a/api_test/biz_logic/rest_api/docs/Aicusapico35klzY80ikPh.md +++ /dev/null @@ -1,30 +0,0 @@ -# Aicusapico35klzY80ikPh - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**items** | [**List[Aicusapico35klzY80ikPhItemsInner]**](Aicusapico35klzY80ikPhItemsInner.md) | | [optional] -**count** | **int** | | [optional] - -## Example - -```python -from openapi_client.models.aicusapico35klz_y80ik_ph import Aicusapico35klzY80ikPh - -# TODO update the JSON string below -json = "{}" -# create an instance of Aicusapico35klzY80ikPh from a JSON string -aicusapico35klz_y80ik_ph_instance = Aicusapico35klzY80ikPh.from_json(json) -# print the JSON string representation of the object -print(Aicusapico35klzY80ikPh.to_json()) - -# convert the object into a dict -aicusapico35klz_y80ik_ph_dict = aicusapico35klz_y80ik_ph_instance.to_dict() -# create an instance of Aicusapico35klzY80ikPh from a dict -aicusapico35klz_y80ik_ph_from_dict = Aicusapico35klzY80ikPh.from_dict(aicusapico35klz_y80ik_ph_dict) -``` -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) 
[[Back to README]](../README.md) - - diff --git a/api_test/biz_logic/rest_api/docs/Aicusapico35klzY80ikPhItemsInner.md b/api_test/biz_logic/rest_api/docs/Aicusapico35klzY80ikPhItemsInner.md deleted file mode 100644 index d793aed1f..000000000 --- a/api_test/biz_logic/rest_api/docs/Aicusapico35klzY80ikPhItemsInner.md +++ /dev/null @@ -1,33 +0,0 @@ -# Aicusapico35klzY80ikPhItemsInner - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**s3_path** | **str** | | -**create_time** | **str** | | -**s3_prefix** | **str** | | -**qa_list** | [**List[Aicusapico35klzY80ikPhItemsInnerQAListInner]**](Aicusapico35klzY80ikPhItemsInnerQAListInner.md) | | [optional] -**status** | **str** | | - -## Example - -```python -from openapi_client.models.aicusapico35klz_y80ik_ph_items_inner import Aicusapico35klzY80ikPhItemsInner - -# TODO update the JSON string below -json = "{}" -# create an instance of Aicusapico35klzY80ikPhItemsInner from a JSON string -aicusapico35klz_y80ik_ph_items_inner_instance = Aicusapico35klzY80ikPhItemsInner.from_json(json) -# print the JSON string representation of the object -print(Aicusapico35klzY80ikPhItemsInner.to_json()) - -# convert the object into a dict -aicusapico35klz_y80ik_ph_items_inner_dict = aicusapico35klz_y80ik_ph_items_inner_instance.to_dict() -# create an instance of Aicusapico35klzY80ikPhItemsInner from a dict -aicusapico35klz_y80ik_ph_items_inner_from_dict = Aicusapico35klzY80ikPhItemsInner.from_dict(aicusapico35klz_y80ik_ph_items_inner_dict) -``` -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/api_test/biz_logic/rest_api/docs/Aicusapico35klzY80ikPhItemsInnerQAListInner.md b/api_test/biz_logic/rest_api/docs/Aicusapico35klzY80ikPhItemsInnerQAListInner.md deleted file mode 100644 index c5e6c10e9..000000000 --- 
a/api_test/biz_logic/rest_api/docs/Aicusapico35klzY80ikPhItemsInnerQAListInner.md +++ /dev/null @@ -1,31 +0,0 @@ -# Aicusapico35klzY80ikPhItemsInnerQAListInner - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**question** | **str** | | [optional] -**kwargs** | **str** | | [optional] -**intention** | **str** | | [optional] - -## Example - -```python -from openapi_client.models.aicusapico35klz_y80ik_ph_items_inner_qa_list_inner import Aicusapico35klzY80ikPhItemsInnerQAListInner - -# TODO update the JSON string below -json = "{}" -# create an instance of Aicusapico35klzY80ikPhItemsInnerQAListInner from a JSON string -aicusapico35klz_y80ik_ph_items_inner_qa_list_inner_instance = Aicusapico35klzY80ikPhItemsInnerQAListInner.from_json(json) -# print the JSON string representation of the object -print(Aicusapico35klzY80ikPhItemsInnerQAListInner.to_json()) - -# convert the object into a dict -aicusapico35klz_y80ik_ph_items_inner_qa_list_inner_dict = aicusapico35klz_y80ik_ph_items_inner_qa_list_inner_instance.to_dict() -# create an instance of Aicusapico35klzY80ikPhItemsInnerQAListInner from a dict -aicusapico35klz_y80ik_ph_items_inner_qa_list_inner_from_dict = Aicusapico35klzY80ikPhItemsInnerQAListInner.from_dict(aicusapico35klz_y80ik_ph_items_inner_qa_list_inner_dict) -``` -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/api_test/biz_logic/rest_api/docs/Aicusapico4LPAf103DGIi.md b/api_test/biz_logic/rest_api/docs/Aicusapico4LPAf103DGIi.md new file mode 100644 index 000000000..04d03d831 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/Aicusapico4LPAf103DGIi.md @@ -0,0 +1,30 @@ +# Aicusapico4LPAf103DGIi + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**data** | 
[**Aicusapico4LPAf103DGIiData**](Aicusapico4LPAf103DGIiData.md) | | [optional] +**message** | **str** | | [optional] + +## Example + +```python +from openapi_client.models.aicusapico4_lpaf103_dgii import Aicusapico4LPAf103DGIi + +# TODO update the JSON string below +json = "{}" +# create an instance of Aicusapico4LPAf103DGIi from a JSON string +aicusapico4_lpaf103_dgii_instance = Aicusapico4LPAf103DGIi.from_json(json) +# print the JSON string representation of the object +print(Aicusapico4LPAf103DGIi.to_json()) + +# convert the object into a dict +aicusapico4_lpaf103_dgii_dict = aicusapico4_lpaf103_dgii_instance.to_dict() +# create an instance of Aicusapico4LPAf103DGIi from a dict +aicusapico4_lpaf103_dgii_from_dict = Aicusapico4LPAf103DGIi.from_dict(aicusapico4_lpaf103_dgii_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/Aicusapico4LPAf103DGIiData.md b/api_test/biz_logic/rest_api/docs/Aicusapico4LPAf103DGIiData.md new file mode 100644 index 000000000..eb36971f6 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/Aicusapico4LPAf103DGIiData.md @@ -0,0 +1,31 @@ +# Aicusapico4LPAf103DGIiData + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**s3_prefix** | **str** | | [optional] +**s3_bucket** | **str** | | [optional] +**url** | **str** | | [optional] + +## Example + +```python +from openapi_client.models.aicusapico4_lpaf103_dgii_data import Aicusapico4LPAf103DGIiData + +# TODO update the JSON string below +json = "{}" +# create an instance of Aicusapico4LPAf103DGIiData from a JSON string +aicusapico4_lpaf103_dgii_data_instance = Aicusapico4LPAf103DGIiData.from_json(json) +# print the JSON string representation of the object +print(Aicusapico4LPAf103DGIiData.to_json()) + +# convert the object into a dict 
+aicusapico4_lpaf103_dgii_data_dict = aicusapico4_lpaf103_dgii_data_instance.to_dict() +# create an instance of Aicusapico4LPAf103DGIiData from a dict +aicusapico4_lpaf103_dgii_data_from_dict = Aicusapico4LPAf103DGIiData.from_dict(aicusapico4_lpaf103_dgii_data_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/Aicusapico4rwMspzeBOe5.md b/api_test/biz_logic/rest_api/docs/Aicusapico4rwMspzeBOe5.md deleted file mode 100644 index 7c0df331d..000000000 --- a/api_test/biz_logic/rest_api/docs/Aicusapico4rwMspzeBOe5.md +++ /dev/null @@ -1,31 +0,0 @@ -# Aicusapico4rwMspzeBOe5 - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**result** | **str** | | [optional] -**execution_id** | **str** | | [optional] -**input_payload** | [**Aicusapico4rwMspzeBOe5InputPayload**](Aicusapico4rwMspzeBOe5InputPayload.md) | | [optional] - -## Example - -```python -from openapi_client.models.aicusapico4rw_mspze_boe5 import Aicusapico4rwMspzeBOe5 - -# TODO update the JSON string below -json = "{}" -# create an instance of Aicusapico4rwMspzeBOe5 from a JSON string -aicusapico4rw_mspze_boe5_instance = Aicusapico4rwMspzeBOe5.from_json(json) -# print the JSON string representation of the object -print(Aicusapico4rwMspzeBOe5.to_json()) - -# convert the object into a dict -aicusapico4rw_mspze_boe5_dict = aicusapico4rw_mspze_boe5_instance.to_dict() -# create an instance of Aicusapico4rwMspzeBOe5 from a dict -aicusapico4rw_mspze_boe5_from_dict = Aicusapico4rwMspzeBOe5.from_dict(aicusapico4rw_mspze_boe5_dict) -``` -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/api_test/biz_logic/rest_api/docs/Aicusapico51RafCAYOxiZ.md 
b/api_test/biz_logic/rest_api/docs/Aicusapico51RafCAYOxiZ.md deleted file mode 100644 index 2680a966e..000000000 --- a/api_test/biz_logic/rest_api/docs/Aicusapico51RafCAYOxiZ.md +++ /dev/null @@ -1,30 +0,0 @@ -# Aicusapico51RafCAYOxiZ - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**content_type** | **str** | | -**file_name** | **str** | | - -## Example - -```python -from openapi_client.models.aicusapico51_raf_cay_oxi_z import Aicusapico51RafCAYOxiZ - -# TODO update the JSON string below -json = "{}" -# create an instance of Aicusapico51RafCAYOxiZ from a JSON string -aicusapico51_raf_cay_oxi_z_instance = Aicusapico51RafCAYOxiZ.from_json(json) -# print the JSON string representation of the object -print(Aicusapico51RafCAYOxiZ.to_json()) - -# convert the object into a dict -aicusapico51_raf_cay_oxi_z_dict = aicusapico51_raf_cay_oxi_z_instance.to_dict() -# create an instance of Aicusapico51RafCAYOxiZ from a dict -aicusapico51_raf_cay_oxi_z_from_dict = Aicusapico51RafCAYOxiZ.from_dict(aicusapico51_raf_cay_oxi_z_dict) -``` -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/api_test/biz_logic/rest_api/docs/Aicusapico5ObTetko9oMO.md b/api_test/biz_logic/rest_api/docs/Aicusapico5ObTetko9oMO.md new file mode 100644 index 000000000..54508b41d --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/Aicusapico5ObTetko9oMO.md @@ -0,0 +1,30 @@ +# Aicusapico5ObTetko9oMO + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**items** | [**List[Aicusapico5ObTetko9oMOItemsInner]**](Aicusapico5ObTetko9oMOItemsInner.md) | | [optional] +**count** | **int** | | [optional] + +## Example + +```python +from openapi_client.models.aicusapico5_ob_tetko9o_mo import Aicusapico5ObTetko9oMO + +# TODO update the JSON string below +json = 
"{}" +# create an instance of Aicusapico5ObTetko9oMO from a JSON string +aicusapico5_ob_tetko9o_mo_instance = Aicusapico5ObTetko9oMO.from_json(json) +# print the JSON string representation of the object +print(Aicusapico5ObTetko9oMO.to_json()) + +# convert the object into a dict +aicusapico5_ob_tetko9o_mo_dict = aicusapico5_ob_tetko9o_mo_instance.to_dict() +# create an instance of Aicusapico5ObTetko9oMO from a dict +aicusapico5_ob_tetko9o_mo_from_dict = Aicusapico5ObTetko9oMO.from_dict(aicusapico5_ob_tetko9o_mo_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/Aicusapico5ObTetko9oMOItemsInner.md b/api_test/biz_logic/rest_api/docs/Aicusapico5ObTetko9oMOItemsInner.md new file mode 100644 index 000000000..323c84875 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/Aicusapico5ObTetko9oMOItemsInner.md @@ -0,0 +1,34 @@ +# Aicusapico5ObTetko9oMOItemsInner + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**execution_id** | **str** | | +**s3_path** | **str** | | +**create_time** | **str** | | +**s3_prefix** | **str** | | +**s3_bucket** | **str** | | +**status** | **str** | | + +## Example + +```python +from openapi_client.models.aicusapico5_ob_tetko9o_mo_items_inner import Aicusapico5ObTetko9oMOItemsInner + +# TODO update the JSON string below +json = "{}" +# create an instance of Aicusapico5ObTetko9oMOItemsInner from a JSON string +aicusapico5_ob_tetko9o_mo_items_inner_instance = Aicusapico5ObTetko9oMOItemsInner.from_json(json) +# print the JSON string representation of the object +print(Aicusapico5ObTetko9oMOItemsInner.to_json()) + +# convert the object into a dict +aicusapico5_ob_tetko9o_mo_items_inner_dict = aicusapico5_ob_tetko9o_mo_items_inner_instance.to_dict() +# create an instance of Aicusapico5ObTetko9oMOItemsInner from a 
dict +aicusapico5_ob_tetko9o_mo_items_inner_from_dict = Aicusapico5ObTetko9oMOItemsInner.from_dict(aicusapico5_ob_tetko9o_mo_items_inner_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoDPw375iu4xb1.md b/api_test/biz_logic/rest_api/docs/AicusapicoDPw375iu4xb1.md new file mode 100644 index 000000000..be1ccbde6 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/AicusapicoDPw375iu4xb1.md @@ -0,0 +1,29 @@ +# AicusapicoDPw375iu4xb1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**execution_id** | **List[str]** | | + +## Example + +```python +from openapi_client.models.aicusapico_dpw375iu4xb1 import AicusapicoDPw375iu4xb1 + +# TODO update the JSON string below +json = "{}" +# create an instance of AicusapicoDPw375iu4xb1 from a JSON string +aicusapico_dpw375iu4xb1_instance = AicusapicoDPw375iu4xb1.from_json(json) +# print the JSON string representation of the object +print(AicusapicoDPw375iu4xb1.to_json()) + +# convert the object into a dict +aicusapico_dpw375iu4xb1_dict = aicusapico_dpw375iu4xb1_instance.to_dict() +# create an instance of AicusapicoDPw375iu4xb1 from a dict +aicusapico_dpw375iu4xb1_from_dict = AicusapicoDPw375iu4xb1.from_dict(aicusapico_dpw375iu4xb1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoHWyvBnB1QggI.md b/api_test/biz_logic/rest_api/docs/AicusapicoHWyvBnB1QggI.md new file mode 100644 index 000000000..491de551d --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/AicusapicoHWyvBnB1QggI.md @@ -0,0 +1,31 @@ +# AicusapicoHWyvBnB1QggI + + +## Properties + +Name | Type | Description | Notes +------------ | 
------------- | ------------- | ------------- +**config** | [**AicusapicoHWyvBnB1QggIConfig**](AicusapicoHWyvBnB1QggIConfig.md) | | [optional] +**items** | [**List[AicusapicoHWyvBnB1QggIItemsInner]**](AicusapicoHWyvBnB1QggIItemsInner.md) | | [optional] +**count** | **int** | | [optional] + +## Example + +```python +from openapi_client.models.aicusapico_h_wyv_bn_b1_qgg_i import AicusapicoHWyvBnB1QggI + +# TODO update the JSON string below +json = "{}" +# create an instance of AicusapicoHWyvBnB1QggI from a JSON string +aicusapico_h_wyv_bn_b1_qgg_i_instance = AicusapicoHWyvBnB1QggI.from_json(json) +# print the JSON string representation of the object +print(AicusapicoHWyvBnB1QggI.to_json()) + +# convert the object into a dict +aicusapico_h_wyv_bn_b1_qgg_i_dict = aicusapico_h_wyv_bn_b1_qgg_i_instance.to_dict() +# create an instance of AicusapicoHWyvBnB1QggI from a dict +aicusapico_h_wyv_bn_b1_qgg_i_from_dict = AicusapicoHWyvBnB1QggI.from_dict(aicusapico_h_wyv_bn_b1_qgg_i_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoHWyvBnB1QggIConfig.md b/api_test/biz_logic/rest_api/docs/AicusapicoHWyvBnB1QggIConfig.md new file mode 100644 index 000000000..20dedc3f2 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/AicusapicoHWyvBnB1QggIConfig.md @@ -0,0 +1,30 @@ +# AicusapicoHWyvBnB1QggIConfig + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**page_size** | **int** | | [optional] +**max_items** | **int** | | [optional] + +## Example + +```python +from openapi_client.models.aicusapico_h_wyv_bn_b1_qgg_i_config import AicusapicoHWyvBnB1QggIConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of AicusapicoHWyvBnB1QggIConfig from a JSON string +aicusapico_h_wyv_bn_b1_qgg_i_config_instance = 
AicusapicoHWyvBnB1QggIConfig.from_json(json) +# print the JSON string representation of the object +print(AicusapicoHWyvBnB1QggIConfig.to_json()) + +# convert the object into a dict +aicusapico_h_wyv_bn_b1_qgg_i_config_dict = aicusapico_h_wyv_bn_b1_qgg_i_config_instance.to_dict() +# create an instance of AicusapicoHWyvBnB1QggIConfig from a dict +aicusapico_h_wyv_bn_b1_qgg_i_config_from_dict = AicusapicoHWyvBnB1QggIConfig.from_dict(aicusapico_h_wyv_bn_b1_qgg_i_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/Aicusapicob9jxGQ8zv1ASItemsInner.md b/api_test/biz_logic/rest_api/docs/AicusapicoHWyvBnB1QggIItemsInner.md similarity index 52% rename from api_test/biz_logic/rest_api/docs/Aicusapicob9jxGQ8zv1ASItemsInner.md rename to api_test/biz_logic/rest_api/docs/AicusapicoHWyvBnB1QggIItemsInner.md index ab7e0b012..f6680510e 100644 --- a/api_test/biz_logic/rest_api/docs/Aicusapicob9jxGQ8zv1ASItemsInner.md +++ b/api_test/biz_logic/rest_api/docs/AicusapicoHWyvBnB1QggIItemsInner.md @@ -1,4 +1,4 @@ -# Aicusapicob9jxGQ8zv1ASItemsInner +# AicusapicoHWyvBnB1QggIItemsInner ## Properties @@ -19,19 +19,19 @@ Name | Type | Description | Notes ## Example ```python -from openapi_client.models.aicusapicob9jx_gq8zv1_as_items_inner import Aicusapicob9jxGQ8zv1ASItemsInner +from openapi_client.models.aicusapico_h_wyv_bn_b1_qgg_i_items_inner import AicusapicoHWyvBnB1QggIItemsInner # TODO update the JSON string below json = "{}" -# create an instance of Aicusapicob9jxGQ8zv1ASItemsInner from a JSON string -aicusapicob9jx_gq8zv1_as_items_inner_instance = Aicusapicob9jxGQ8zv1ASItemsInner.from_json(json) +# create an instance of AicusapicoHWyvBnB1QggIItemsInner from a JSON string +aicusapico_h_wyv_bn_b1_qgg_i_items_inner_instance = AicusapicoHWyvBnB1QggIItemsInner.from_json(json) # print the JSON string representation 
of the object -print(Aicusapicob9jxGQ8zv1ASItemsInner.to_json()) +print(AicusapicoHWyvBnB1QggIItemsInner.to_json()) # convert the object into a dict -aicusapicob9jx_gq8zv1_as_items_inner_dict = aicusapicob9jx_gq8zv1_as_items_inner_instance.to_dict() -# create an instance of Aicusapicob9jxGQ8zv1ASItemsInner from a dict -aicusapicob9jx_gq8zv1_as_items_inner_from_dict = Aicusapicob9jxGQ8zv1ASItemsInner.from_dict(aicusapicob9jx_gq8zv1_as_items_inner_dict) +aicusapico_h_wyv_bn_b1_qgg_i_items_inner_dict = aicusapico_h_wyv_bn_b1_qgg_i_items_inner_instance.to_dict() +# create an instance of AicusapicoHWyvBnB1QggIItemsInner from a dict +aicusapico_h_wyv_bn_b1_qgg_i_items_inner_from_dict = AicusapicoHWyvBnB1QggIItemsInner.from_dict(aicusapico_h_wyv_bn_b1_qgg_i_items_inner_dict) ``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/api_test/biz_logic/rest_api/docs/Aicusapicoh5w3FRwxBjhG.md b/api_test/biz_logic/rest_api/docs/AicusapicoKUtg5hw5MQ23.md similarity index 52% rename from api_test/biz_logic/rest_api/docs/Aicusapicoh5w3FRwxBjhG.md rename to api_test/biz_logic/rest_api/docs/AicusapicoKUtg5hw5MQ23.md index 01954317a..f9be4d4a1 100644 --- a/api_test/biz_logic/rest_api/docs/Aicusapicoh5w3FRwxBjhG.md +++ b/api_test/biz_logic/rest_api/docs/AicusapicoKUtg5hw5MQ23.md @@ -1,4 +1,4 @@ -# Aicusapicoh5w3FRwxBjhG +# AicusapicoKUtg5hw5MQ23 ## Properties @@ -13,19 +13,19 @@ Name | Type | Description | Notes ## Example ```python -from openapi_client.models.aicusapicoh5w3_f_rwx_bjh_g import Aicusapicoh5w3FRwxBjhG +from openapi_client.models.aicusapico_k_utg5hw5_mq23 import AicusapicoKUtg5hw5MQ23 # TODO update the JSON string below json = "{}" -# create an instance of Aicusapicoh5w3FRwxBjhG from a JSON string -aicusapicoh5w3_f_rwx_bjh_g_instance = Aicusapicoh5w3FRwxBjhG.from_json(json) +# create an instance of AicusapicoKUtg5hw5MQ23 from a JSON string 
+aicusapico_k_utg5hw5_mq23_instance = AicusapicoKUtg5hw5MQ23.from_json(json) # print the JSON string representation of the object -print(Aicusapicoh5w3FRwxBjhG.to_json()) +print(AicusapicoKUtg5hw5MQ23.to_json()) # convert the object into a dict -aicusapicoh5w3_f_rwx_bjh_g_dict = aicusapicoh5w3_f_rwx_bjh_g_instance.to_dict() -# create an instance of Aicusapicoh5w3FRwxBjhG from a dict -aicusapicoh5w3_f_rwx_bjh_g_from_dict = Aicusapicoh5w3FRwxBjhG.from_dict(aicusapicoh5w3_f_rwx_bjh_g_dict) +aicusapico_k_utg5hw5_mq23_dict = aicusapico_k_utg5hw5_mq23_instance.to_dict() +# create an instance of AicusapicoKUtg5hw5MQ23 from a dict +aicusapico_k_utg5hw5_mq23_from_dict = AicusapicoKUtg5hw5MQ23.from_dict(aicusapico_k_utg5hw5_mq23_dict) ``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoNPq1TceemSd8.md b/api_test/biz_logic/rest_api/docs/AicusapicoNPq1TceemSd8.md new file mode 100644 index 000000000..025f4087b --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/AicusapicoNPq1TceemSd8.md @@ -0,0 +1,31 @@ +# AicusapicoNPq1TceemSd8 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**AicusapicoHWyvBnB1QggIConfig**](AicusapicoHWyvBnB1QggIConfig.md) | | [optional] +**items** | [**List[AicusapicoNPq1TceemSd8ItemsInner]**](AicusapicoNPq1TceemSd8ItemsInner.md) | | [optional] +**count** | **int** | | [optional] + +## Example + +```python +from openapi_client.models.aicusapico_npq1_tceem_sd8 import AicusapicoNPq1TceemSd8 + +# TODO update the JSON string below +json = "{}" +# create an instance of AicusapicoNPq1TceemSd8 from a JSON string +aicusapico_npq1_tceem_sd8_instance = AicusapicoNPq1TceemSd8.from_json(json) +# print the JSON string representation of the object +print(AicusapicoNPq1TceemSd8.to_json()) + +# convert the object into a 
dict +aicusapico_npq1_tceem_sd8_dict = aicusapico_npq1_tceem_sd8_instance.to_dict() +# create an instance of AicusapicoNPq1TceemSd8 from a dict +aicusapico_npq1_tceem_sd8_from_dict = AicusapicoNPq1TceemSd8.from_dict(aicusapico_npq1_tceem_sd8_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/Aicusapico2TwvXbhsTncyItemsInner.md b/api_test/biz_logic/rest_api/docs/AicusapicoNPq1TceemSd8ItemsInner.md similarity index 59% rename from api_test/biz_logic/rest_api/docs/Aicusapico2TwvXbhsTncyItemsInner.md rename to api_test/biz_logic/rest_api/docs/AicusapicoNPq1TceemSd8ItemsInner.md index 59c11f893..c416b3399 100644 --- a/api_test/biz_logic/rest_api/docs/Aicusapico2TwvXbhsTncyItemsInner.md +++ b/api_test/biz_logic/rest_api/docs/AicusapicoNPq1TceemSd8ItemsInner.md @@ -1,4 +1,4 @@ -# Aicusapico2TwvXbhsTncyItemsInner +# AicusapicoNPq1TceemSd8ItemsInner ## Properties @@ -24,19 +24,19 @@ Name | Type | Description | Notes ## Example ```python -from openapi_client.models.aicusapico2_twv_xbhs_tncy_items_inner import Aicusapico2TwvXbhsTncyItemsInner +from openapi_client.models.aicusapico_npq1_tceem_sd8_items_inner import AicusapicoNPq1TceemSd8ItemsInner # TODO update the JSON string below json = "{}" -# create an instance of Aicusapico2TwvXbhsTncyItemsInner from a JSON string -aicusapico2_twv_xbhs_tncy_items_inner_instance = Aicusapico2TwvXbhsTncyItemsInner.from_json(json) +# create an instance of AicusapicoNPq1TceemSd8ItemsInner from a JSON string +aicusapico_npq1_tceem_sd8_items_inner_instance = AicusapicoNPq1TceemSd8ItemsInner.from_json(json) # print the JSON string representation of the object -print(Aicusapico2TwvXbhsTncyItemsInner.to_json()) +print(AicusapicoNPq1TceemSd8ItemsInner.to_json()) # convert the object into a dict -aicusapico2_twv_xbhs_tncy_items_inner_dict = 
aicusapico2_twv_xbhs_tncy_items_inner_instance.to_dict() -# create an instance of Aicusapico2TwvXbhsTncyItemsInner from a dict -aicusapico2_twv_xbhs_tncy_items_inner_from_dict = Aicusapico2TwvXbhsTncyItemsInner.from_dict(aicusapico2_twv_xbhs_tncy_items_inner_dict) +aicusapico_npq1_tceem_sd8_items_inner_dict = aicusapico_npq1_tceem_sd8_items_inner_instance.to_dict() +# create an instance of AicusapicoNPq1TceemSd8ItemsInner from a dict +aicusapico_npq1_tceem_sd8_items_inner_from_dict = AicusapicoNPq1TceemSd8ItemsInner.from_dict(aicusapico_npq1_tceem_sd8_items_inner_dict) ``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoOzq0ulOG1nrK.md b/api_test/biz_logic/rest_api/docs/AicusapicoOzq0ulOG1nrK.md new file mode 100644 index 000000000..68b1ec738 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/AicusapicoOzq0ulOG1nrK.md @@ -0,0 +1,31 @@ +# AicusapicoOzq0ulOG1nrK + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**result** | **str** | | [optional] +**execution_id** | **str** | | [optional] +**input_payload** | [**AicusapicoOzq0ulOG1nrKInputPayload**](AicusapicoOzq0ulOG1nrKInputPayload.md) | | [optional] + +## Example + +```python +from openapi_client.models.aicusapico_ozq0ul_og1nr_k import AicusapicoOzq0ulOG1nrK + +# TODO update the JSON string below +json = "{}" +# create an instance of AicusapicoOzq0ulOG1nrK from a JSON string +aicusapico_ozq0ul_og1nr_k_instance = AicusapicoOzq0ulOG1nrK.from_json(json) +# print the JSON string representation of the object +print(AicusapicoOzq0ulOG1nrK.to_json()) + +# convert the object into a dict +aicusapico_ozq0ul_og1nr_k_dict = aicusapico_ozq0ul_og1nr_k_instance.to_dict() +# create an instance of AicusapicoOzq0ulOG1nrK from a dict +aicusapico_ozq0ul_og1nr_k_from_dict = 
AicusapicoOzq0ulOG1nrK.from_dict(aicusapico_ozq0ul_og1nr_k_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/Aicusapico4rwMspzeBOe5InputPayload.md b/api_test/biz_logic/rest_api/docs/AicusapicoOzq0ulOG1nrKInputPayload.md similarity index 51% rename from api_test/biz_logic/rest_api/docs/Aicusapico4rwMspzeBOe5InputPayload.md rename to api_test/biz_logic/rest_api/docs/AicusapicoOzq0ulOG1nrKInputPayload.md index 63f10d1f5..e761205de 100644 --- a/api_test/biz_logic/rest_api/docs/Aicusapico4rwMspzeBOe5InputPayload.md +++ b/api_test/biz_logic/rest_api/docs/AicusapicoOzq0ulOG1nrKInputPayload.md @@ -1,4 +1,4 @@ -# Aicusapico4rwMspzeBOe5InputPayload +# AicusapicoOzq0ulOG1nrKInputPayload ## Properties @@ -15,19 +15,19 @@ Name | Type | Description | Notes ## Example ```python -from openapi_client.models.aicusapico4rw_mspze_boe5_input_payload import Aicusapico4rwMspzeBOe5InputPayload +from openapi_client.models.aicusapico_ozq0ul_og1nr_k_input_payload import AicusapicoOzq0ulOG1nrKInputPayload # TODO update the JSON string below json = "{}" -# create an instance of Aicusapico4rwMspzeBOe5InputPayload from a JSON string -aicusapico4rw_mspze_boe5_input_payload_instance = Aicusapico4rwMspzeBOe5InputPayload.from_json(json) +# create an instance of AicusapicoOzq0ulOG1nrKInputPayload from a JSON string +aicusapico_ozq0ul_og1nr_k_input_payload_instance = AicusapicoOzq0ulOG1nrKInputPayload.from_json(json) # print the JSON string representation of the object -print(Aicusapico4rwMspzeBOe5InputPayload.to_json()) +print(AicusapicoOzq0ulOG1nrKInputPayload.to_json()) # convert the object into a dict -aicusapico4rw_mspze_boe5_input_payload_dict = aicusapico4rw_mspze_boe5_input_payload_instance.to_dict() -# create an instance of Aicusapico4rwMspzeBOe5InputPayload from a dict -aicusapico4rw_mspze_boe5_input_payload_from_dict = 
Aicusapico4rwMspzeBOe5InputPayload.from_dict(aicusapico4rw_mspze_boe5_input_payload_dict) +aicusapico_ozq0ul_og1nr_k_input_payload_dict = aicusapico_ozq0ul_og1nr_k_input_payload_instance.to_dict() +# create an instance of AicusapicoOzq0ulOG1nrKInputPayload from a dict +aicusapico_ozq0ul_og1nr_k_input_payload_from_dict = AicusapicoOzq0ulOG1nrKInputPayload.from_dict(aicusapico_ozq0ul_og1nr_k_input_payload_dict) ``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoQjcoKzzZFI86.md b/api_test/biz_logic/rest_api/docs/AicusapicoQjcoKzzZFI86.md deleted file mode 100644 index dad248e0b..000000000 --- a/api_test/biz_logic/rest_api/docs/AicusapicoQjcoKzzZFI86.md +++ /dev/null @@ -1,30 +0,0 @@ -# AicusapicoQjcoKzzZFI86 - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**items** | [**List[AicusapicoQjcoKzzZFI86ItemsInner]**](AicusapicoQjcoKzzZFI86ItemsInner.md) | | [optional] -**count** | **int** | | [optional] - -## Example - -```python -from openapi_client.models.aicusapico_qjco_kzz_zfi86 import AicusapicoQjcoKzzZFI86 - -# TODO update the JSON string below -json = "{}" -# create an instance of AicusapicoQjcoKzzZFI86 from a JSON string -aicusapico_qjco_kzz_zfi86_instance = AicusapicoQjcoKzzZFI86.from_json(json) -# print the JSON string representation of the object -print(AicusapicoQjcoKzzZFI86.to_json()) - -# convert the object into a dict -aicusapico_qjco_kzz_zfi86_dict = aicusapico_qjco_kzz_zfi86_instance.to_dict() -# create an instance of AicusapicoQjcoKzzZFI86 from a dict -aicusapico_qjco_kzz_zfi86_from_dict = AicusapicoQjcoKzzZFI86.from_dict(aicusapico_qjco_kzz_zfi86_dict) -``` -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 
- - diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoQjcoKzzZFI86ItemsInner.md b/api_test/biz_logic/rest_api/docs/AicusapicoQjcoKzzZFI86ItemsInner.md deleted file mode 100644 index 4d0914e60..000000000 --- a/api_test/biz_logic/rest_api/docs/AicusapicoQjcoKzzZFI86ItemsInner.md +++ /dev/null @@ -1,34 +0,0 @@ -# AicusapicoQjcoKzzZFI86ItemsInner - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**execution_id** | **str** | | -**s3_path** | **str** | | -**create_time** | **str** | | -**s3_prefix** | **str** | | -**s3_bucket** | **str** | | -**status** | **str** | | - -## Example - -```python -from openapi_client.models.aicusapico_qjco_kzz_zfi86_items_inner import AicusapicoQjcoKzzZFI86ItemsInner - -# TODO update the JSON string below -json = "{}" -# create an instance of AicusapicoQjcoKzzZFI86ItemsInner from a JSON string -aicusapico_qjco_kzz_zfi86_items_inner_instance = AicusapicoQjcoKzzZFI86ItemsInner.from_json(json) -# print the JSON string representation of the object -print(AicusapicoQjcoKzzZFI86ItemsInner.to_json()) - -# convert the object into a dict -aicusapico_qjco_kzz_zfi86_items_inner_dict = aicusapico_qjco_kzz_zfi86_items_inner_instance.to_dict() -# create an instance of AicusapicoQjcoKzzZFI86ItemsInner from a dict -aicusapico_qjco_kzz_zfi86_items_inner_from_dict = AicusapicoQjcoKzzZFI86ItemsInner.from_dict(aicusapico_qjco_kzz_zfi86_items_inner_dict) -``` -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoseOArXMRpSNs.md b/api_test/biz_logic/rest_api/docs/AicusapicoTaAUp0RjHHQ0.md similarity index 55% rename from api_test/biz_logic/rest_api/docs/AicusapicoseOArXMRpSNs.md rename to api_test/biz_logic/rest_api/docs/AicusapicoTaAUp0RjHHQ0.md index 0484ad594..1c47c2b51 100644 --- 
a/api_test/biz_logic/rest_api/docs/AicusapicoseOArXMRpSNs.md +++ b/api_test/biz_logic/rest_api/docs/AicusapicoTaAUp0RjHHQ0.md @@ -1,4 +1,4 @@ -# AicusapicoseOArXMRpSNs +# AicusapicoTaAUp0RjHHQ0 ## Properties @@ -16,19 +16,19 @@ Name | Type | Description | Notes ## Example ```python -from openapi_client.models.aicusapicose_oar_xmrp_sns import AicusapicoseOArXMRpSNs +from openapi_client.models.aicusapico_ta_aup0_rj_hhq0 import AicusapicoTaAUp0RjHHQ0 # TODO update the JSON string below json = "{}" -# create an instance of AicusapicoseOArXMRpSNs from a JSON string -aicusapicose_oar_xmrp_sns_instance = AicusapicoseOArXMRpSNs.from_json(json) +# create an instance of AicusapicoTaAUp0RjHHQ0 from a JSON string +aicusapico_ta_aup0_rj_hhq0_instance = AicusapicoTaAUp0RjHHQ0.from_json(json) # print the JSON string representation of the object -print(AicusapicoseOArXMRpSNs.to_json()) +print(AicusapicoTaAUp0RjHHQ0.to_json()) # convert the object into a dict -aicusapicose_oar_xmrp_sns_dict = aicusapicose_oar_xmrp_sns_instance.to_dict() -# create an instance of AicusapicoseOArXMRpSNs from a dict -aicusapicose_oar_xmrp_sns_from_dict = AicusapicoseOArXMRpSNs.from_dict(aicusapicose_oar_xmrp_sns_dict) +aicusapico_ta_aup0_rj_hhq0_dict = aicusapico_ta_aup0_rj_hhq0_instance.to_dict() +# create an instance of AicusapicoTaAUp0RjHHQ0 from a dict +aicusapico_ta_aup0_rj_hhq0_from_dict = AicusapicoTaAUp0RjHHQ0.from_dict(aicusapico_ta_aup0_rj_hhq0_dict) ``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoUy1YBXiWJ5Aq.md b/api_test/biz_logic/rest_api/docs/AicusapicoUy1YBXiWJ5Aq.md deleted file mode 100644 index 8552def5a..000000000 --- a/api_test/biz_logic/rest_api/docs/AicusapicoUy1YBXiWJ5Aq.md +++ /dev/null @@ -1,32 +0,0 @@ -# AicusapicoUy1YBXiWJ5Aq - - -## Properties - -Name | Type | Description | Notes ------------- | 
------------- | ------------- | ------------- -**data** | **str** | | [optional] -**message** | **str** | | [optional] -**s3_prefix** | **str** | | [optional] -**s3_bucket** | **str** | | [optional] - -## Example - -```python -from openapi_client.models.aicusapico_uy1_ybxi_wj5_aq import AicusapicoUy1YBXiWJ5Aq - -# TODO update the JSON string below -json = "{}" -# create an instance of AicusapicoUy1YBXiWJ5Aq from a JSON string -aicusapico_uy1_ybxi_wj5_aq_instance = AicusapicoUy1YBXiWJ5Aq.from_json(json) -# print the JSON string representation of the object -print(AicusapicoUy1YBXiWJ5Aq.to_json()) - -# convert the object into a dict -aicusapico_uy1_ybxi_wj5_aq_dict = aicusapico_uy1_ybxi_wj5_aq_instance.to_dict() -# create an instance of AicusapicoUy1YBXiWJ5Aq from a dict -aicusapico_uy1_ybxi_wj5_aq_from_dict = AicusapicoUy1YBXiWJ5Aq.from_dict(aicusapico_uy1_ybxi_wj5_aq_dict) -``` -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoYa9VOrUQINzF.md b/api_test/biz_logic/rest_api/docs/AicusapicoYa9VOrUQINzF.md new file mode 100644 index 000000000..25515995f --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/AicusapicoYa9VOrUQINzF.md @@ -0,0 +1,32 @@ +# AicusapicoYa9VOrUQINzF + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**chatbot_ids** | **List[str]** | | [optional] +**config** | [**AicusapicoHWyvBnB1QggIConfig**](AicusapicoHWyvBnB1QggIConfig.md) | | [optional] +**items** | [**List[AicusapicoYa9VOrUQINzFItemsInner]**](AicusapicoYa9VOrUQINzFItemsInner.md) | | [optional] +**count** | **int** | | [optional] + +## Example + +```python +from openapi_client.models.aicusapico_ya9_vor_uqinz_f import AicusapicoYa9VOrUQINzF + +# TODO update the JSON string below +json = "{}" +# create an instance of AicusapicoYa9VOrUQINzF from a JSON 
string +aicusapico_ya9_vor_uqinz_f_instance = AicusapicoYa9VOrUQINzF.from_json(json) +# print the JSON string representation of the object +print(AicusapicoYa9VOrUQINzF.to_json()) + +# convert the object into a dict +aicusapico_ya9_vor_uqinz_f_dict = aicusapico_ya9_vor_uqinz_f_instance.to_dict() +# create an instance of AicusapicoYa9VOrUQINzF from a dict +aicusapico_ya9_vor_uqinz_f_from_dict = AicusapicoYa9VOrUQINzF.from_dict(aicusapico_ya9_vor_uqinz_f_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoYa9VOrUQINzFItemsInner.md b/api_test/biz_logic/rest_api/docs/AicusapicoYa9VOrUQINzFItemsInner.md new file mode 100644 index 000000000..b049fd1d0 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/AicusapicoYa9VOrUQINzFItemsInner.md @@ -0,0 +1,32 @@ +# AicusapicoYa9VOrUQINzFItemsInner + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**chatbot_id** | **str** | | +**model_name** | **str** | | +**last_modified_time** | **str** | | +**model_id** | **str** | | + +## Example + +```python +from openapi_client.models.aicusapico_ya9_vor_uqinz_f_items_inner import AicusapicoYa9VOrUQINzFItemsInner + +# TODO update the JSON string below +json = "{}" +# create an instance of AicusapicoYa9VOrUQINzFItemsInner from a JSON string +aicusapico_ya9_vor_uqinz_f_items_inner_instance = AicusapicoYa9VOrUQINzFItemsInner.from_json(json) +# print the JSON string representation of the object +print(AicusapicoYa9VOrUQINzFItemsInner.to_json()) + +# convert the object into a dict +aicusapico_ya9_vor_uqinz_f_items_inner_dict = aicusapico_ya9_vor_uqinz_f_items_inner_instance.to_dict() +# create an instance of AicusapicoYa9VOrUQINzFItemsInner from a dict +aicusapico_ya9_vor_uqinz_f_items_inner_from_dict = 
AicusapicoYa9VOrUQINzFItemsInner.from_dict(aicusapico_ya9_vor_uqinz_f_items_inner_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoEOcLNul8cwxa.md b/api_test/biz_logic/rest_api/docs/AicusapicoZeNx832zHfgx.md similarity index 54% rename from api_test/biz_logic/rest_api/docs/AicusapicoEOcLNul8cwxa.md rename to api_test/biz_logic/rest_api/docs/AicusapicoZeNx832zHfgx.md index c3c8b5f18..33f9c1a1d 100644 --- a/api_test/biz_logic/rest_api/docs/AicusapicoEOcLNul8cwxa.md +++ b/api_test/biz_logic/rest_api/docs/AicusapicoZeNx832zHfgx.md @@ -1,4 +1,4 @@ -# AicusapicoEOcLNul8cwxa +# AicusapicoZeNx832zHfgx ## Properties @@ -14,19 +14,19 @@ Name | Type | Description | Notes ## Example ```python -from openapi_client.models.aicusapico_eoc_l_nul8cwxa import AicusapicoEOcLNul8cwxa +from openapi_client.models.aicusapico_ze_nx832z_hfgx import AicusapicoZeNx832zHfgx # TODO update the JSON string below json = "{}" -# create an instance of AicusapicoEOcLNul8cwxa from a JSON string -aicusapico_eoc_l_nul8cwxa_instance = AicusapicoEOcLNul8cwxa.from_json(json) +# create an instance of AicusapicoZeNx832zHfgx from a JSON string +aicusapico_ze_nx832z_hfgx_instance = AicusapicoZeNx832zHfgx.from_json(json) # print the JSON string representation of the object -print(AicusapicoEOcLNul8cwxa.to_json()) +print(AicusapicoZeNx832zHfgx.to_json()) # convert the object into a dict -aicusapico_eoc_l_nul8cwxa_dict = aicusapico_eoc_l_nul8cwxa_instance.to_dict() -# create an instance of AicusapicoEOcLNul8cwxa from a dict -aicusapico_eoc_l_nul8cwxa_from_dict = AicusapicoEOcLNul8cwxa.from_dict(aicusapico_eoc_l_nul8cwxa_dict) +aicusapico_ze_nx832z_hfgx_dict = aicusapico_ze_nx832z_hfgx_instance.to_dict() +# create an instance of AicusapicoZeNx832zHfgx from a dict +aicusapico_ze_nx832z_hfgx_from_dict = 
AicusapicoZeNx832zHfgx.from_dict(aicusapico_ze_nx832z_hfgx_dict) ``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoaOehYyqx8qlR.md b/api_test/biz_logic/rest_api/docs/AicusapicoaOehYyqx8qlR.md new file mode 100644 index 000000000..a50805255 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/AicusapicoaOehYyqx8qlR.md @@ -0,0 +1,32 @@ +# AicusapicoaOehYyqx8qlR + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**chatbot_id** | **str** | | [optional] +**group_name** | **str** | | [optional] +**index_ids** | [**AicusapicoaOehYyqx8qlRIndexIds**](AicusapicoaOehYyqx8qlRIndexIds.md) | | [optional] +**message** | **str** | | [optional] + +## Example + +```python +from openapi_client.models.aicusapicoa_oeh_yyqx8ql_r import AicusapicoaOehYyqx8qlR + +# TODO update the JSON string below +json = "{}" +# create an instance of AicusapicoaOehYyqx8qlR from a JSON string +aicusapicoa_oeh_yyqx8ql_r_instance = AicusapicoaOehYyqx8qlR.from_json(json) +# print the JSON string representation of the object +print(AicusapicoaOehYyqx8qlR.to_json()) + +# convert the object into a dict +aicusapicoa_oeh_yyqx8ql_r_dict = aicusapicoa_oeh_yyqx8ql_r_instance.to_dict() +# create an instance of AicusapicoaOehYyqx8qlR from a dict +aicusapicoa_oeh_yyqx8ql_r_from_dict = AicusapicoaOehYyqx8qlR.from_dict(aicusapicoa_oeh_yyqx8ql_r_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoaOehYyqx8qlRIndexIds.md b/api_test/biz_logic/rest_api/docs/AicusapicoaOehYyqx8qlRIndexIds.md new file mode 100644 index 000000000..7d7266ae0 --- /dev/null +++ 
b/api_test/biz_logic/rest_api/docs/AicusapicoaOehYyqx8qlRIndexIds.md @@ -0,0 +1,31 @@ +# AicusapicoaOehYyqx8qlRIndexIds + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**qq** | **str** | | [optional] +**qd** | **str** | | [optional] +**intention** | **str** | | [optional] + +## Example + +```python +from openapi_client.models.aicusapicoa_oeh_yyqx8ql_r_index_ids import AicusapicoaOehYyqx8qlRIndexIds + +# TODO update the JSON string below +json = "{}" +# create an instance of AicusapicoaOehYyqx8qlRIndexIds from a JSON string +aicusapicoa_oeh_yyqx8ql_r_index_ids_instance = AicusapicoaOehYyqx8qlRIndexIds.from_json(json) +# print the JSON string representation of the object +print(AicusapicoaOehYyqx8qlRIndexIds.to_json()) + +# convert the object into a dict +aicusapicoa_oeh_yyqx8ql_r_index_ids_dict = aicusapicoa_oeh_yyqx8ql_r_index_ids_instance.to_dict() +# create an instance of AicusapicoaOehYyqx8qlRIndexIds from a dict +aicusapicoa_oeh_yyqx8ql_r_index_ids_from_dict = AicusapicoaOehYyqx8qlRIndexIds.from_dict(aicusapicoa_oeh_yyqx8ql_r_index_ids_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/Aicusapicob9jxGQ8zv1AS.md b/api_test/biz_logic/rest_api/docs/Aicusapicob9jxGQ8zv1AS.md deleted file mode 100644 index e27699397..000000000 --- a/api_test/biz_logic/rest_api/docs/Aicusapicob9jxGQ8zv1AS.md +++ /dev/null @@ -1,31 +0,0 @@ -# Aicusapicob9jxGQ8zv1AS - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**config** | [**Aicusapico2TwvXbhsTncyConfig**](Aicusapico2TwvXbhsTncyConfig.md) | | [optional] -**items** | [**List[Aicusapicob9jxGQ8zv1ASItemsInner]**](Aicusapicob9jxGQ8zv1ASItemsInner.md) | | [optional] -**count** | **int** | | [optional] - -## Example - -```python 
-from openapi_client.models.aicusapicob9jx_gq8zv1_as import Aicusapicob9jxGQ8zv1AS - -# TODO update the JSON string below -json = "{}" -# create an instance of Aicusapicob9jxGQ8zv1AS from a JSON string -aicusapicob9jx_gq8zv1_as_instance = Aicusapicob9jxGQ8zv1AS.from_json(json) -# print the JSON string representation of the object -print(Aicusapicob9jxGQ8zv1AS.to_json()) - -# convert the object into a dict -aicusapicob9jx_gq8zv1_as_dict = aicusapicob9jx_gq8zv1_as_instance.to_dict() -# create an instance of Aicusapicob9jxGQ8zv1AS from a dict -aicusapicob9jx_gq8zv1_as_from_dict = Aicusapicob9jxGQ8zv1AS.from_dict(aicusapicob9jx_gq8zv1_as_dict) -``` -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/api_test/biz_logic/rest_api/docs/AicusapicobMN2pLK9AvE8.md b/api_test/biz_logic/rest_api/docs/AicusapicobMN2pLK9AvE8.md deleted file mode 100644 index b7595bf4b..000000000 --- a/api_test/biz_logic/rest_api/docs/AicusapicobMN2pLK9AvE8.md +++ /dev/null @@ -1,32 +0,0 @@ -# AicusapicobMN2pLK9AvE8 - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**model_name** | **str** | | -**chatbot_id** | **str** | | -**model_id** | **str** | | -**index** | [**AicusapicobMN2pLK9AvE8Index**](AicusapicobMN2pLK9AvE8Index.md) | | - -## Example - -```python -from openapi_client.models.aicusapicob_mn2p_lk9_av_e8 import AicusapicobMN2pLK9AvE8 - -# TODO update the JSON string below -json = "{}" -# create an instance of AicusapicobMN2pLK9AvE8 from a JSON string -aicusapicob_mn2p_lk9_av_e8_instance = AicusapicobMN2pLK9AvE8.from_json(json) -# print the JSON string representation of the object -print(AicusapicobMN2pLK9AvE8.to_json()) - -# convert the object into a dict -aicusapicob_mn2p_lk9_av_e8_dict = aicusapicob_mn2p_lk9_av_e8_instance.to_dict() -# create an instance of AicusapicobMN2pLK9AvE8 from a dict 
-aicusapicob_mn2p_lk9_av_e8_from_dict = AicusapicobMN2pLK9AvE8.from_dict(aicusapicob_mn2p_lk9_av_e8_dict) -``` -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/api_test/biz_logic/rest_api/docs/AicusapicobMN2pLK9AvE8Index.md b/api_test/biz_logic/rest_api/docs/AicusapicobMN2pLK9AvE8Index.md deleted file mode 100644 index 5a9ddb6ce..000000000 --- a/api_test/biz_logic/rest_api/docs/AicusapicobMN2pLK9AvE8Index.md +++ /dev/null @@ -1,31 +0,0 @@ -# AicusapicobMN2pLK9AvE8Index - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**qq** | **str** | | -**qd** | **str** | | -**intention** | **str** | | - -## Example - -```python -from openapi_client.models.aicusapicob_mn2p_lk9_av_e8_index import AicusapicobMN2pLK9AvE8Index - -# TODO update the JSON string below -json = "{}" -# create an instance of AicusapicobMN2pLK9AvE8Index from a JSON string -aicusapicob_mn2p_lk9_av_e8_index_instance = AicusapicobMN2pLK9AvE8Index.from_json(json) -# print the JSON string representation of the object -print(AicusapicobMN2pLK9AvE8Index.to_json()) - -# convert the object into a dict -aicusapicob_mn2p_lk9_av_e8_index_dict = aicusapicob_mn2p_lk9_av_e8_index_instance.to_dict() -# create an instance of AicusapicobMN2pLK9AvE8Index from a dict -aicusapicob_mn2p_lk9_av_e8_index_from_dict = AicusapicobMN2pLK9AvE8Index.from_dict(aicusapicob_mn2p_lk9_av_e8_index_dict) -``` -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoCyd129M65yKV.md b/api_test/biz_logic/rest_api/docs/AicusapicodBETf4Zuz6WH.md similarity index 51% rename from api_test/biz_logic/rest_api/docs/AicusapicoCyd129M65yKV.md rename to 
api_test/biz_logic/rest_api/docs/AicusapicodBETf4Zuz6WH.md index 0801851ff..3d71e95e4 100644 --- a/api_test/biz_logic/rest_api/docs/AicusapicoCyd129M65yKV.md +++ b/api_test/biz_logic/rest_api/docs/AicusapicodBETf4Zuz6WH.md @@ -1,4 +1,4 @@ -# AicusapicoCyd129M65yKV +# AicusapicodBETf4Zuz6WH ## Properties @@ -11,19 +11,19 @@ Name | Type | Description | Notes ## Example ```python -from openapi_client.models.aicusapico_cyd129_m65y_kv import AicusapicoCyd129M65yKV +from openapi_client.models.aicusapicod_betf4_zuz6_wh import AicusapicodBETf4Zuz6WH # TODO update the JSON string below json = "{}" -# create an instance of AicusapicoCyd129M65yKV from a JSON string -aicusapico_cyd129_m65y_kv_instance = AicusapicoCyd129M65yKV.from_json(json) +# create an instance of AicusapicodBETf4Zuz6WH from a JSON string +aicusapicod_betf4_zuz6_wh_instance = AicusapicodBETf4Zuz6WH.from_json(json) # print the JSON string representation of the object -print(AicusapicoCyd129M65yKV.to_json()) +print(AicusapicodBETf4Zuz6WH.to_json()) # convert the object into a dict -aicusapico_cyd129_m65y_kv_dict = aicusapico_cyd129_m65y_kv_instance.to_dict() -# create an instance of AicusapicoCyd129M65yKV from a dict -aicusapico_cyd129_m65y_kv_from_dict = AicusapicoCyd129M65yKV.from_dict(aicusapico_cyd129_m65y_kv_dict) +aicusapicod_betf4_zuz6_wh_dict = aicusapicod_betf4_zuz6_wh_instance.to_dict() +# create an instance of AicusapicodBETf4Zuz6WH from a dict +aicusapicod_betf4_zuz6_wh_from_dict = AicusapicodBETf4Zuz6WH.from_dict(aicusapicod_betf4_zuz6_wh_dict) ``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/api_test/biz_logic/rest_api/docs/AicusapicohQbFv37cvtQS.md b/api_test/biz_logic/rest_api/docs/AicusapicohQbFv37cvtQS.md deleted file mode 100644 index 76ecb8961..000000000 --- a/api_test/biz_logic/rest_api/docs/AicusapicohQbFv37cvtQS.md +++ /dev/null @@ -1,32 +0,0 @@ -# 
AicusapicohQbFv37cvtQS - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**chatbot_id** | **str** | | [optional] -**group_name** | **str** | | [optional] -**index_ids** | [**AicusapicohQbFv37cvtQSIndexIds**](AicusapicohQbFv37cvtQSIndexIds.md) | | [optional] -**message** | **str** | | [optional] - -## Example - -```python -from openapi_client.models.aicusapicoh_qb_fv37cvt_qs import AicusapicohQbFv37cvtQS - -# TODO update the JSON string below -json = "{}" -# create an instance of AicusapicohQbFv37cvtQS from a JSON string -aicusapicoh_qb_fv37cvt_qs_instance = AicusapicohQbFv37cvtQS.from_json(json) -# print the JSON string representation of the object -print(AicusapicohQbFv37cvtQS.to_json()) - -# convert the object into a dict -aicusapicoh_qb_fv37cvt_qs_dict = aicusapicoh_qb_fv37cvt_qs_instance.to_dict() -# create an instance of AicusapicohQbFv37cvtQS from a dict -aicusapicoh_qb_fv37cvt_qs_from_dict = AicusapicohQbFv37cvtQS.from_dict(aicusapicoh_qb_fv37cvt_qs_dict) -``` -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/api_test/biz_logic/rest_api/docs/AicusapicohQbFv37cvtQSIndexIds.md b/api_test/biz_logic/rest_api/docs/AicusapicohQbFv37cvtQSIndexIds.md deleted file mode 100644 index b6afe94e8..000000000 --- a/api_test/biz_logic/rest_api/docs/AicusapicohQbFv37cvtQSIndexIds.md +++ /dev/null @@ -1,31 +0,0 @@ -# AicusapicohQbFv37cvtQSIndexIds - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**qq** | **str** | | [optional] -**qd** | **str** | | [optional] -**intention** | **str** | | [optional] - -## Example - -```python -from openapi_client.models.aicusapicoh_qb_fv37cvt_qs_index_ids import AicusapicohQbFv37cvtQSIndexIds - -# TODO update the JSON string below -json = "{}" -# create an instance of 
AicusapicohQbFv37cvtQSIndexIds from a JSON string -aicusapicoh_qb_fv37cvt_qs_index_ids_instance = AicusapicohQbFv37cvtQSIndexIds.from_json(json) -# print the JSON string representation of the object -print(AicusapicohQbFv37cvtQSIndexIds.to_json()) - -# convert the object into a dict -aicusapicoh_qb_fv37cvt_qs_index_ids_dict = aicusapicoh_qb_fv37cvt_qs_index_ids_instance.to_dict() -# create an instance of AicusapicohQbFv37cvtQSIndexIds from a dict -aicusapicoh_qb_fv37cvt_qs_index_ids_from_dict = AicusapicohQbFv37cvtQSIndexIds.from_dict(aicusapicoh_qb_fv37cvt_qs_index_ids_dict) -``` -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoiXUam8N8Dh8l.md b/api_test/biz_logic/rest_api/docs/AicusapicoiXUam8N8Dh8l.md deleted file mode 100644 index 62d46bd39..000000000 --- a/api_test/biz_logic/rest_api/docs/AicusapicoiXUam8N8Dh8l.md +++ /dev/null @@ -1,32 +0,0 @@ -# AicusapicoiXUam8N8Dh8l - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**chatbot_ids** | **List[str]** | | [optional] -**config** | [**Aicusapico2TwvXbhsTncyConfig**](Aicusapico2TwvXbhsTncyConfig.md) | | [optional] -**items** | [**List[AicusapicoiXUam8N8Dh8lItemsInner]**](AicusapicoiXUam8N8Dh8lItemsInner.md) | | [optional] -**count** | **int** | | [optional] - -## Example - -```python -from openapi_client.models.aicusapicoi_x_uam8_n8_dh8l import AicusapicoiXUam8N8Dh8l - -# TODO update the JSON string below -json = "{}" -# create an instance of AicusapicoiXUam8N8Dh8l from a JSON string -aicusapicoi_x_uam8_n8_dh8l_instance = AicusapicoiXUam8N8Dh8l.from_json(json) -# print the JSON string representation of the object -print(AicusapicoiXUam8N8Dh8l.to_json()) - -# convert the object into a dict -aicusapicoi_x_uam8_n8_dh8l_dict = aicusapicoi_x_uam8_n8_dh8l_instance.to_dict() -# 
create an instance of AicusapicoiXUam8N8Dh8l from a dict -aicusapicoi_x_uam8_n8_dh8l_from_dict = AicusapicoiXUam8N8Dh8l.from_dict(aicusapicoi_x_uam8_n8_dh8l_dict) -``` -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoiXUam8N8Dh8lItemsInner.md b/api_test/biz_logic/rest_api/docs/AicusapicoiXUam8N8Dh8lItemsInner.md deleted file mode 100644 index 296417a0c..000000000 --- a/api_test/biz_logic/rest_api/docs/AicusapicoiXUam8N8Dh8lItemsInner.md +++ /dev/null @@ -1,32 +0,0 @@ -# AicusapicoiXUam8N8Dh8lItemsInner - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**chatbot_id** | **str** | | -**model_name** | **str** | | -**last_modified_time** | **str** | | -**model_id** | **str** | | - -## Example - -```python -from openapi_client.models.aicusapicoi_x_uam8_n8_dh8l_items_inner import AicusapicoiXUam8N8Dh8lItemsInner - -# TODO update the JSON string below -json = "{}" -# create an instance of AicusapicoiXUam8N8Dh8lItemsInner from a JSON string -aicusapicoi_x_uam8_n8_dh8l_items_inner_instance = AicusapicoiXUam8N8Dh8lItemsInner.from_json(json) -# print the JSON string representation of the object -print(AicusapicoiXUam8N8Dh8lItemsInner.to_json()) - -# convert the object into a dict -aicusapicoi_x_uam8_n8_dh8l_items_inner_dict = aicusapicoi_x_uam8_n8_dh8l_items_inner_instance.to_dict() -# create an instance of AicusapicoiXUam8N8Dh8lItemsInner from a dict -aicusapicoi_x_uam8_n8_dh8l_items_inner_from_dict = AicusapicoiXUam8N8Dh8lItemsInner.from_dict(aicusapicoi_x_uam8_n8_dh8l_items_inner_dict) -``` -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoqhVwTerAVPQm.md 
b/api_test/biz_logic/rest_api/docs/AicusapicoqhVwTerAVPQm.md new file mode 100644 index 000000000..71d1e209a --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/AicusapicoqhVwTerAVPQm.md @@ -0,0 +1,30 @@ +# AicusapicoqhVwTerAVPQm + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**items** | [**List[AicusapicoqhVwTerAVPQmItemsInner]**](AicusapicoqhVwTerAVPQmItemsInner.md) | | [optional] +**count** | **int** | | [optional] + +## Example + +```python +from openapi_client.models.aicusapicoqh_vw_ter_avpqm import AicusapicoqhVwTerAVPQm + +# TODO update the JSON string below +json = "{}" +# create an instance of AicusapicoqhVwTerAVPQm from a JSON string +aicusapicoqh_vw_ter_avpqm_instance = AicusapicoqhVwTerAVPQm.from_json(json) +# print the JSON string representation of the object +print(AicusapicoqhVwTerAVPQm.to_json()) + +# convert the object into a dict +aicusapicoqh_vw_ter_avpqm_dict = aicusapicoqh_vw_ter_avpqm_instance.to_dict() +# create an instance of AicusapicoqhVwTerAVPQm from a dict +aicusapicoqh_vw_ter_avpqm_from_dict = AicusapicoqhVwTerAVPQm.from_dict(aicusapicoqh_vw_ter_avpqm_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoqhVwTerAVPQmItemsInner.md b/api_test/biz_logic/rest_api/docs/AicusapicoqhVwTerAVPQmItemsInner.md new file mode 100644 index 000000000..117b3e0f2 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/AicusapicoqhVwTerAVPQmItemsInner.md @@ -0,0 +1,33 @@ +# AicusapicoqhVwTerAVPQmItemsInner + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**s3_path** | **str** | | +**create_time** | **str** | | +**s3_prefix** | **str** | | +**qa_list** | 
[**List[AicusapicoqhVwTerAVPQmItemsInnerQAListInner]**](AicusapicoqhVwTerAVPQmItemsInnerQAListInner.md) | | [optional] +**status** | **str** | | + +## Example + +```python +from openapi_client.models.aicusapicoqh_vw_ter_avpqm_items_inner import AicusapicoqhVwTerAVPQmItemsInner + +# TODO update the JSON string below +json = "{}" +# create an instance of AicusapicoqhVwTerAVPQmItemsInner from a JSON string +aicusapicoqh_vw_ter_avpqm_items_inner_instance = AicusapicoqhVwTerAVPQmItemsInner.from_json(json) +# print the JSON string representation of the object +print(AicusapicoqhVwTerAVPQmItemsInner.to_json()) + +# convert the object into a dict +aicusapicoqh_vw_ter_avpqm_items_inner_dict = aicusapicoqh_vw_ter_avpqm_items_inner_instance.to_dict() +# create an instance of AicusapicoqhVwTerAVPQmItemsInner from a dict +aicusapicoqh_vw_ter_avpqm_items_inner_from_dict = AicusapicoqhVwTerAVPQmItemsInner.from_dict(aicusapicoqh_vw_ter_avpqm_items_inner_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/AicusapicoqhVwTerAVPQmItemsInnerQAListInner.md b/api_test/biz_logic/rest_api/docs/AicusapicoqhVwTerAVPQmItemsInnerQAListInner.md new file mode 100644 index 000000000..fe8e6bc5f --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/AicusapicoqhVwTerAVPQmItemsInnerQAListInner.md @@ -0,0 +1,31 @@ +# AicusapicoqhVwTerAVPQmItemsInnerQAListInner + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**question** | **str** | | [optional] +**kwargs** | **str** | | [optional] +**intention** | **str** | | [optional] + +## Example + +```python +from openapi_client.models.aicusapicoqh_vw_ter_avpqm_items_inner_qa_list_inner import AicusapicoqhVwTerAVPQmItemsInnerQAListInner + +# TODO update the JSON string below +json = "{}" +# create an instance of 
AicusapicoqhVwTerAVPQmItemsInnerQAListInner from a JSON string +aicusapicoqh_vw_ter_avpqm_items_inner_qa_list_inner_instance = AicusapicoqhVwTerAVPQmItemsInnerQAListInner.from_json(json) +# print the JSON string representation of the object +print(AicusapicoqhVwTerAVPQmItemsInnerQAListInner.to_json()) + +# convert the object into a dict +aicusapicoqh_vw_ter_avpqm_items_inner_qa_list_inner_dict = aicusapicoqh_vw_ter_avpqm_items_inner_qa_list_inner_instance.to_dict() +# create an instance of AicusapicoqhVwTerAVPQmItemsInnerQAListInner from a dict +aicusapicoqh_vw_ter_avpqm_items_inner_qa_list_inner_from_dict = AicusapicoqhVwTerAVPQmItemsInnerQAListInner.from_dict(aicusapicoqh_vw_ter_avpqm_items_inner_qa_list_inner_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/Aicusapicor1Kt5C2mLnkm.md b/api_test/biz_logic/rest_api/docs/Aicusapicor1Kt5C2mLnkm.md deleted file mode 100644 index 1b9828fa2..000000000 --- a/api_test/biz_logic/rest_api/docs/Aicusapicor1Kt5C2mLnkm.md +++ /dev/null @@ -1,29 +0,0 @@ -# Aicusapicor1Kt5C2mLnkm - - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**execution_id** | **List[str]** | | - -## Example - -```python -from openapi_client.models.aicusapicor1_kt5_c2m_lnkm import Aicusapicor1Kt5C2mLnkm - -# TODO update the JSON string below -json = "{}" -# create an instance of Aicusapicor1Kt5C2mLnkm from a JSON string -aicusapicor1_kt5_c2m_lnkm_instance = Aicusapicor1Kt5C2mLnkm.from_json(json) -# print the JSON string representation of the object -print(Aicusapicor1Kt5C2mLnkm.to_json()) - -# convert the object into a dict -aicusapicor1_kt5_c2m_lnkm_dict = aicusapicor1_kt5_c2m_lnkm_instance.to_dict() -# create an instance of Aicusapicor1Kt5C2mLnkm from a dict -aicusapicor1_kt5_c2m_lnkm_from_dict = 
Aicusapicor1Kt5C2mLnkm.from_dict(aicusapicor1_kt5_c2m_lnkm_dict) -``` -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/api_test/biz_logic/rest_api/docs/Aicusapicou6VksROJ90h2.md b/api_test/biz_logic/rest_api/docs/Aicusapicou6VksROJ90h2.md new file mode 100644 index 000000000..f756a9487 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/Aicusapicou6VksROJ90h2.md @@ -0,0 +1,32 @@ +# Aicusapicou6VksROJ90h2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**model_name** | **str** | | +**chatbot_id** | **str** | | +**model_id** | **str** | | +**index** | [**Aicusapicou6VksROJ90h2Index**](Aicusapicou6VksROJ90h2Index.md) | | + +## Example + +```python +from openapi_client.models.aicusapicou6_vks_roj90h2 import Aicusapicou6VksROJ90h2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Aicusapicou6VksROJ90h2 from a JSON string +aicusapicou6_vks_roj90h2_instance = Aicusapicou6VksROJ90h2.from_json(json) +# print the JSON string representation of the object +print(Aicusapicou6VksROJ90h2.to_json()) + +# convert the object into a dict +aicusapicou6_vks_roj90h2_dict = aicusapicou6_vks_roj90h2_instance.to_dict() +# create an instance of Aicusapicou6VksROJ90h2 from a dict +aicusapicou6_vks_roj90h2_from_dict = Aicusapicou6VksROJ90h2.from_dict(aicusapicou6_vks_roj90h2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/Aicusapicou6VksROJ90h2Index.md b/api_test/biz_logic/rest_api/docs/Aicusapicou6VksROJ90h2Index.md new file mode 100644 index 000000000..aabe12c01 --- /dev/null +++ b/api_test/biz_logic/rest_api/docs/Aicusapicou6VksROJ90h2Index.md @@ -0,0 +1,31 @@ +# Aicusapicou6VksROJ90h2Index 
+ + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**qq** | **str** | | +**qd** | **str** | | +**intention** | **str** | | + +## Example + +```python +from openapi_client.models.aicusapicou6_vks_roj90h2_index import Aicusapicou6VksROJ90h2Index + +# TODO update the JSON string below +json = "{}" +# create an instance of Aicusapicou6VksROJ90h2Index from a JSON string +aicusapicou6_vks_roj90h2_index_instance = Aicusapicou6VksROJ90h2Index.from_json(json) +# print the JSON string representation of the object +print(Aicusapicou6VksROJ90h2Index.to_json()) + +# convert the object into a dict +aicusapicou6_vks_roj90h2_index_dict = aicusapicou6_vks_roj90h2_index_instance.to_dict() +# create an instance of Aicusapicou6VksROJ90h2Index from a dict +aicusapicou6_vks_roj90h2_index_from_dict = Aicusapicou6VksROJ90h2Index.from_dict(aicusapicou6_vks_roj90h2_index_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/api_test/biz_logic/rest_api/docs/Aicusapicoqew7t5vTA2ak.md b/api_test/biz_logic/rest_api/docs/Aicusapicoyip3eUBUK13Z.md similarity index 50% rename from api_test/biz_logic/rest_api/docs/Aicusapicoqew7t5vTA2ak.md rename to api_test/biz_logic/rest_api/docs/Aicusapicoyip3eUBUK13Z.md index 70dc38633..7a1aa4697 100644 --- a/api_test/biz_logic/rest_api/docs/Aicusapicoqew7t5vTA2ak.md +++ b/api_test/biz_logic/rest_api/docs/Aicusapicoyip3eUBUK13Z.md @@ -1,4 +1,4 @@ -# Aicusapicoqew7t5vTA2ak +# Aicusapicoyip3eUBUK13Z ## Properties @@ -11,19 +11,19 @@ Name | Type | Description | Notes ## Example ```python -from openapi_client.models.aicusapicoqew7t5v_ta2ak import Aicusapicoqew7t5vTA2ak +from openapi_client.models.aicusapicoyip3e_ubuk13_z import Aicusapicoyip3eUBUK13Z # TODO update the JSON string below json = "{}" -# create an instance of Aicusapicoqew7t5vTA2ak from a JSON string 
-aicusapicoqew7t5v_ta2ak_instance = Aicusapicoqew7t5vTA2ak.from_json(json) +# create an instance of Aicusapicoyip3eUBUK13Z from a JSON string +aicusapicoyip3e_ubuk13_z_instance = Aicusapicoyip3eUBUK13Z.from_json(json) # print the JSON string representation of the object -print(Aicusapicoqew7t5vTA2ak.to_json()) +print(Aicusapicoyip3eUBUK13Z.to_json()) # convert the object into a dict -aicusapicoqew7t5v_ta2ak_dict = aicusapicoqew7t5v_ta2ak_instance.to_dict() -# create an instance of Aicusapicoqew7t5vTA2ak from a dict -aicusapicoqew7t5v_ta2ak_from_dict = Aicusapicoqew7t5vTA2ak.from_dict(aicusapicoqew7t5v_ta2ak_dict) +aicusapicoyip3e_ubuk13_z_dict = aicusapicoyip3e_ubuk13_z_instance.to_dict() +# create an instance of Aicusapicoyip3eUBUK13Z from a dict +aicusapicoyip3e_ubuk13_z_from_dict = Aicusapicoyip3eUBUK13Z.from_dict(aicusapicoyip3e_ubuk13_z_dict) ``` [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/api_test/biz_logic/rest_api/docs/DefaultApi.md b/api_test/biz_logic/rest_api/docs/DefaultApi.md index 99557ce54..74ac8e974 100644 --- a/api_test/biz_logic/rest_api/docs/DefaultApi.md +++ b/api_test/biz_logic/rest_api/docs/DefaultApi.md @@ -1,6 +1,6 @@ # openapi_client.DefaultApi -All URIs are relative to *https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod* +All URIs are relative to *https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod* Method | HTTP request | Description ------------- | ------------- | ------------- @@ -74,10 +74,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -127,9 +127,9 @@ This endpoint does not need any parameter. | Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) @@ -146,10 +146,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -205,10 +205,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -258,9 +258,9 @@ This endpoint does not need any parameter. | Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) @@ -278,10 +278,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -331,9 +331,9 @@ This endpoint does not need any parameter. 
| Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) @@ -350,10 +350,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -408,10 +408,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -467,10 +467,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -520,9 +520,9 @@ This endpoint does not need any parameter. | Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) @@ -540,10 +540,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -593,9 +593,9 @@ This endpoint does not need any parameter. | Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) @@ -612,10 +612,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -658,7 +658,7 @@ No authorization required [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **chatbot_management_chatbots_get** -> AicusapicoiXUam8N8Dh8l chatbot_management_chatbots_get(page_size=page_size, max_items=max_items) +> AicusapicoYa9VOrUQINzF chatbot_management_chatbots_get(page_size=page_size, max_items=max_items) @@ -668,14 +668,14 @@ No authorization required ```python import openapi_client -from openapi_client.models.aicusapicoi_x_uam8_n8_dh8l import AicusapicoiXUam8N8Dh8l +from openapi_client.models.aicusapico_ya9_vor_uqinz_f import AicusapicoYa9VOrUQINzF from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -716,7 +716,7 @@ Name | Type | Description | Notes ### Return type -[**AicusapicoiXUam8N8Dh8l**](AicusapicoiXUam8N8Dh8l.md) +[**AicusapicoYa9VOrUQINzF**](AicusapicoYa9VOrUQINzF.md) ### Authorization @@ -731,9 +731,9 @@ Name | Type | Description | Notes | Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) @@ -750,10 +750,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -796,7 +796,7 @@ No authorization required [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **chatbot_management_chatbots_post** -> AicusapicohQbFv37cvtQS chatbot_management_chatbots_post(aicusapicob_mn2p_lk9_av_e8) +> AicusapicoaOehYyqx8qlR chatbot_management_chatbots_post(aicusapicou6_vks_roj90h2) @@ -806,15 +806,15 @@ No authorization required ```python import openapi_client -from openapi_client.models.aicusapicob_mn2p_lk9_av_e8 import AicusapicobMN2pLK9AvE8 -from openapi_client.models.aicusapicoh_qb_fv37cvt_qs import AicusapicohQbFv37cvtQS +from openapi_client.models.aicusapicoa_oeh_yyqx8ql_r import AicusapicoaOehYyqx8qlR +from openapi_client.models.aicusapicou6_vks_roj90h2 import Aicusapicou6VksROJ90h2 from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -832,10 +832,10 @@ configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os with openapi_client.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = openapi_client.DefaultApi(api_client) - aicusapicob_mn2p_lk9_av_e8 = openapi_client.AicusapicobMN2pLK9AvE8() # AicusapicobMN2pLK9AvE8 | + aicusapicou6_vks_roj90h2 = openapi_client.Aicusapicou6VksROJ90h2() # Aicusapicou6VksROJ90h2 | try: - api_response = api_instance.chatbot_management_chatbots_post(aicusapicob_mn2p_lk9_av_e8) + api_response = api_instance.chatbot_management_chatbots_post(aicusapicou6_vks_roj90h2) print("The response of DefaultApi->chatbot_management_chatbots_post:\n") pprint(api_response) except Exception as e: @@ -849,11 +849,11 @@ with openapi_client.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **aicusapicob_mn2p_lk9_av_e8** | [**AicusapicobMN2pLK9AvE8**](AicusapicobMN2pLK9AvE8.md)| | + **aicusapicou6_vks_roj90h2** | [**Aicusapicou6VksROJ90h2**](Aicusapicou6VksROJ90h2.md)| | ### Return type -[**AicusapicohQbFv37cvtQS**](AicusapicohQbFv37cvtQS.md) +[**AicusapicoaOehYyqx8qlR**](AicusapicoaOehYyqx8qlR.md) ### Authorization @@ -868,9 +868,9 @@ Name | Type | Description | Notes | Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) @@ -887,10 +887,10 
@@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -946,10 +946,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -999,9 +999,9 @@ This endpoint does not need any parameter. 
| Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) @@ -1019,10 +1019,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -1072,9 +1072,9 @@ This endpoint does not need any parameter. | Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) @@ -1091,10 +1091,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -1150,10 +1150,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -1203,9 +1203,9 @@ This endpoint does not need any parameter. | Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) @@ -1222,10 +1222,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -1280,10 +1280,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -1338,10 +1338,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -1397,10 +1397,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -1450,9 +1450,9 @@ This endpoint does not need any parameter. | Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) @@ -1470,10 +1470,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -1523,9 +1523,9 @@ This endpoint does not need any parameter. 
| Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) @@ -1542,10 +1542,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -1600,10 +1600,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -1646,7 +1646,7 @@ No authorization required [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **intention_execution_presigned_url_post** -> AicusapicoUy1YBXiWJ5Aq intention_execution_presigned_url_post(aicusapico_cyd129_m65y_kv) +> AicusapicoKUtg5hw5MQ23 intention_execution_presigned_url_post(aicusapico2ey_mrt6use_ql) @@ -1656,15 +1656,15 @@ No authorization required ```python import openapi_client -from openapi_client.models.aicusapico_cyd129_m65y_kv import AicusapicoCyd129M65yKV -from openapi_client.models.aicusapico_uy1_ybxi_wj5_aq import AicusapicoUy1YBXiWJ5Aq +from openapi_client.models.aicusapico2ey_mrt6use_ql import Aicusapico2eyMRt6useQL +from openapi_client.models.aicusapico_k_utg5hw5_mq23 import AicusapicoKUtg5hw5MQ23 from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -1682,10 +1682,10 @@ configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os with openapi_client.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = openapi_client.DefaultApi(api_client) - aicusapico_cyd129_m65y_kv = openapi_client.AicusapicoCyd129M65yKV() # AicusapicoCyd129M65yKV | + aicusapico2ey_mrt6use_ql = openapi_client.Aicusapico2eyMRt6useQL() # Aicusapico2eyMRt6useQL | try: - api_response = api_instance.intention_execution_presigned_url_post(aicusapico_cyd129_m65y_kv) + api_response = api_instance.intention_execution_presigned_url_post(aicusapico2ey_mrt6use_ql) print("The response of DefaultApi->intention_execution_presigned_url_post:\n") pprint(api_response) except Exception as e: @@ -1699,11 +1699,11 @@ with openapi_client.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **aicusapico_cyd129_m65y_kv** | [**AicusapicoCyd129M65yKV**](AicusapicoCyd129M65yKV.md)| | + **aicusapico2ey_mrt6use_ql** | [**Aicusapico2eyMRt6useQL**](Aicusapico2eyMRt6useQL.md)| | ### Return type -[**AicusapicoUy1YBXiWJ5Aq**](AicusapicoUy1YBXiWJ5Aq.md) +[**AicusapicoKUtg5hw5MQ23**](AicusapicoKUtg5hw5MQ23.md) ### Authorization @@ -1718,9 +1718,9 @@ Name | Type | Description | Notes | Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) 
@@ -1738,10 +1738,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -1791,14 +1791,14 @@ This endpoint does not need any parameter. | Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **intention_executions_execution_id_get** -> Aicusapico35klzY80ikPh intention_executions_execution_id_get(intention_id, execution_id) +> AicusapicoqhVwTerAVPQm intention_executions_execution_id_get(intention_id, execution_id) @@ -1808,14 +1808,14 @@ This endpoint does not need any parameter. 
```python import openapi_client -from openapi_client.models.aicusapico35klz_y80ik_ph import Aicusapico35klzY80ikPh +from openapi_client.models.aicusapicoqh_vw_ter_avpqm import AicusapicoqhVwTerAVPQm from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -1856,7 +1856,7 @@ Name | Type | Description | Notes ### Return type -[**Aicusapico35klzY80ikPh**](Aicusapico35klzY80ikPh.md) +[**AicusapicoqhVwTerAVPQm**](AicusapicoqhVwTerAVPQm.md) ### Authorization @@ -1871,9 +1871,9 @@ Name | Type | Description | Notes | Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) @@ -1890,10 +1890,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -1940,7 +1940,7 @@ No authorization required [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **intention_executions_get** -> Aicusapicob9jxGQ8zv1AS intention_executions_get(page_size=page_size, max_items=max_items) +> AicusapicoHWyvBnB1QggI intention_executions_get(page_size=page_size, max_items=max_items) @@ -1950,14 +1950,14 @@ No authorization required ```python import openapi_client -from openapi_client.models.aicusapicob9jx_gq8zv1_as import Aicusapicob9jxGQ8zv1AS +from openapi_client.models.aicusapico_h_wyv_bn_b1_qgg_i import AicusapicoHWyvBnB1QggI from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -1998,7 +1998,7 @@ Name | Type | Description | Notes ### Return type -[**Aicusapicob9jxGQ8zv1AS**](Aicusapicob9jxGQ8zv1AS.md) +[**AicusapicoHWyvBnB1QggI**](AicusapicoHWyvBnB1QggI.md) ### Authorization @@ -2013,9 +2013,9 @@ Name | Type | Description | Notes | Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) @@ -2032,10 +2032,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -2078,7 +2078,7 @@ No authorization required [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **intention_executions_post** -> Aicusapico4rwMspzeBOe5 intention_executions_post(aicusapico_eoc_l_nul8cwxa) +> AicusapicoOzq0ulOG1nrK intention_executions_post(aicusapico_ze_nx832z_hfgx) @@ -2088,15 +2088,15 @@ No authorization required ```python import openapi_client -from openapi_client.models.aicusapico4rw_mspze_boe5 import Aicusapico4rwMspzeBOe5 -from openapi_client.models.aicusapico_eoc_l_nul8cwxa import AicusapicoEOcLNul8cwxa +from openapi_client.models.aicusapico_ozq0ul_og1nr_k import AicusapicoOzq0ulOG1nrK +from openapi_client.models.aicusapico_ze_nx832z_hfgx import AicusapicoZeNx832zHfgx from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -2114,10 +2114,10 @@ configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os with openapi_client.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = openapi_client.DefaultApi(api_client) - aicusapico_eoc_l_nul8cwxa = openapi_client.AicusapicoEOcLNul8cwxa() # AicusapicoEOcLNul8cwxa | + aicusapico_ze_nx832z_hfgx = openapi_client.AicusapicoZeNx832zHfgx() # AicusapicoZeNx832zHfgx | try: - api_response = api_instance.intention_executions_post(aicusapico_eoc_l_nul8cwxa) + api_response = api_instance.intention_executions_post(aicusapico_ze_nx832z_hfgx) print("The response of DefaultApi->intention_executions_post:\n") pprint(api_response) except Exception as e: @@ -2131,11 +2131,11 @@ with openapi_client.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **aicusapico_eoc_l_nul8cwxa** | [**AicusapicoEOcLNul8cwxa**](AicusapicoEOcLNul8cwxa.md)| | + **aicusapico_ze_nx832z_hfgx** | [**AicusapicoZeNx832zHfgx**](AicusapicoZeNx832zHfgx.md)| | ### Return type -[**Aicusapico4rwMspzeBOe5**](Aicusapico4rwMspzeBOe5.md) +[**AicusapicoOzq0ulOG1nrK**](AicusapicoOzq0ulOG1nrK.md) ### Authorization @@ -2150,9 +2150,9 @@ Name | Type | Description | Notes | Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) @@ -2169,10 +2169,10 @@ import 
openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -2228,10 +2228,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -2281,9 +2281,9 @@ This endpoint does not need any parameter. 
| Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) @@ -2300,10 +2300,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -2346,7 +2346,7 @@ No authorization required [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **knowledge_base_executions_delete** -> Aicusapicoqew7t5vTA2ak knowledge_base_executions_delete(aicusapicor1_kt5_c2m_lnkm) +> Aicusapicoyip3eUBUK13Z knowledge_base_executions_delete(aicusapico_dpw375iu4xb1) @@ -2356,15 +2356,15 @@ No authorization required ```python import openapi_client -from openapi_client.models.aicusapicoqew7t5v_ta2ak import Aicusapicoqew7t5vTA2ak -from openapi_client.models.aicusapicor1_kt5_c2m_lnkm import Aicusapicor1Kt5C2mLnkm +from openapi_client.models.aicusapico_dpw375iu4xb1 import AicusapicoDPw375iu4xb1 +from openapi_client.models.aicusapicoyip3e_ubuk13_z import Aicusapicoyip3eUBUK13Z from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to 
https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -2382,10 +2382,10 @@ configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os with openapi_client.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = openapi_client.DefaultApi(api_client) - aicusapicor1_kt5_c2m_lnkm = openapi_client.Aicusapicor1Kt5C2mLnkm() # Aicusapicor1Kt5C2mLnkm | + aicusapico_dpw375iu4xb1 = openapi_client.AicusapicoDPw375iu4xb1() # AicusapicoDPw375iu4xb1 | try: - api_response = api_instance.knowledge_base_executions_delete(aicusapicor1_kt5_c2m_lnkm) + api_response = api_instance.knowledge_base_executions_delete(aicusapico_dpw375iu4xb1) print("The response of DefaultApi->knowledge_base_executions_delete:\n") pprint(api_response) except Exception as e: @@ -2399,11 +2399,11 @@ with openapi_client.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **aicusapicor1_kt5_c2m_lnkm** | [**Aicusapicor1Kt5C2mLnkm**](Aicusapicor1Kt5C2mLnkm.md)| | + **aicusapico_dpw375iu4xb1** | [**AicusapicoDPw375iu4xb1**](AicusapicoDPw375iu4xb1.md)| | ### Return type -[**Aicusapicoqew7t5vTA2ak**](Aicusapicoqew7t5vTA2ak.md) +[**Aicusapicoyip3eUBUK13Z**](Aicusapicoyip3eUBUK13Z.md) ### Authorization @@ -2418,14 +2418,14 @@ Name | Type | Description | Notes | Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 
500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **knowledge_base_executions_execution_id_get** -> AicusapicoQjcoKzzZFI86 knowledge_base_executions_execution_id_get(execution_id) +> Aicusapico5ObTetko9oMO knowledge_base_executions_execution_id_get(execution_id) @@ -2435,14 +2435,14 @@ Name | Type | Description | Notes ```python import openapi_client -from openapi_client.models.aicusapico_qjco_kzz_zfi86 import AicusapicoQjcoKzzZFI86 +from openapi_client.models.aicusapico5_ob_tetko9o_mo import Aicusapico5ObTetko9oMO from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -2481,7 +2481,7 @@ Name | Type | Description | Notes ### Return type -[**AicusapicoQjcoKzzZFI86**](AicusapicoQjcoKzzZFI86.md) +[**Aicusapico5ObTetko9oMO**](Aicusapico5ObTetko9oMO.md) ### Authorization @@ -2496,9 +2496,9 @@ Name | Type | Description | Notes | Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) @@ -2515,10 +2515,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -2565,7 +2565,7 @@ No authorization required [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **knowledge_base_executions_get** -> Aicusapico2TwvXbhsTncy knowledge_base_executions_get(page_size=page_size, max_items=max_items) +> AicusapicoNPq1TceemSd8 knowledge_base_executions_get(page_size=page_size, max_items=max_items) @@ -2575,14 +2575,14 @@ No authorization required ```python import openapi_client -from openapi_client.models.aicusapico2_twv_xbhs_tncy import Aicusapico2TwvXbhsTncy +from openapi_client.models.aicusapico_npq1_tceem_sd8 import AicusapicoNPq1TceemSd8 from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -2623,7 +2623,7 @@ Name | Type | Description | Notes ### Return type -[**Aicusapico2TwvXbhsTncy**](Aicusapico2TwvXbhsTncy.md) +[**AicusapicoNPq1TceemSd8**](AicusapicoNPq1TceemSd8.md) ### Authorization @@ -2638,9 +2638,9 @@ Name | Type | Description | Notes | Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) @@ -2657,10 +2657,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -2703,7 +2703,7 @@ No authorization required [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **knowledge_base_executions_post** -> object knowledge_base_executions_post(aicusapicose_oar_xmrp_sns) +> object knowledge_base_executions_post(aicusapico_ta_aup0_rj_hhq0) @@ -2713,14 +2713,14 @@ No authorization required ```python import openapi_client -from openapi_client.models.aicusapicose_oar_xmrp_sns import AicusapicoseOArXMRpSNs +from openapi_client.models.aicusapico_ta_aup0_rj_hhq0 import AicusapicoTaAUp0RjHHQ0 from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -2738,10 +2738,10 @@ configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os with openapi_client.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = openapi_client.DefaultApi(api_client) - aicusapicose_oar_xmrp_sns = openapi_client.AicusapicoseOArXMRpSNs() # AicusapicoseOArXMRpSNs | + aicusapico_ta_aup0_rj_hhq0 = openapi_client.AicusapicoTaAUp0RjHHQ0() # AicusapicoTaAUp0RjHHQ0 | try: - api_response = api_instance.knowledge_base_executions_post(aicusapicose_oar_xmrp_sns) + api_response = api_instance.knowledge_base_executions_post(aicusapico_ta_aup0_rj_hhq0) print("The response of DefaultApi->knowledge_base_executions_post:\n") pprint(api_response) except Exception as e: @@ -2755,7 +2755,7 @@ with openapi_client.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **aicusapicose_oar_xmrp_sns** | [**AicusapicoseOArXMRpSNs**](AicusapicoseOArXMRpSNs.md)| | + **aicusapico_ta_aup0_rj_hhq0** | [**AicusapicoTaAUp0RjHHQ0**](AicusapicoTaAUp0RjHHQ0.md)| | ### Return type @@ -2774,9 +2774,9 @@ Name | Type | Description | Notes | Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) @@ -2793,10 +2793,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional 
and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -2839,7 +2839,7 @@ No authorization required [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **knowledge_base_kb_presigned_url_post** -> Aicusapicoh5w3FRwxBjhG knowledge_base_kb_presigned_url_post(aicusapico51_raf_cay_oxi_z) +> Aicusapico4LPAf103DGIi knowledge_base_kb_presigned_url_post(aicusapicod_betf4_zuz6_wh) @@ -2849,15 +2849,15 @@ No authorization required ```python import openapi_client -from openapi_client.models.aicusapico51_raf_cay_oxi_z import Aicusapico51RafCAYOxiZ -from openapi_client.models.aicusapicoh5w3_f_rwx_bjh_g import Aicusapicoh5w3FRwxBjhG +from openapi_client.models.aicusapico4_lpaf103_dgii import Aicusapico4LPAf103DGIi +from openapi_client.models.aicusapicod_betf4_zuz6_wh import AicusapicodBETf4Zuz6WH from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -2875,10 +2875,10 @@ configuration.api_key['aicustomerserviceapiconstructApiAuthorizerEB0B49FC'] = os with openapi_client.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = openapi_client.DefaultApi(api_client) - aicusapico51_raf_cay_oxi_z = openapi_client.Aicusapico51RafCAYOxiZ() # Aicusapico51RafCAYOxiZ | + aicusapicod_betf4_zuz6_wh = openapi_client.AicusapicodBETf4Zuz6WH() # AicusapicodBETf4Zuz6WH | try: - api_response = api_instance.knowledge_base_kb_presigned_url_post(aicusapico51_raf_cay_oxi_z) + api_response = api_instance.knowledge_base_kb_presigned_url_post(aicusapicod_betf4_zuz6_wh) print("The response of DefaultApi->knowledge_base_kb_presigned_url_post:\n") pprint(api_response) except Exception as e: @@ -2892,11 +2892,11 @@ with openapi_client.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **aicusapico51_raf_cay_oxi_z** | [**Aicusapico51RafCAYOxiZ**](Aicusapico51RafCAYOxiZ.md)| | + **aicusapicod_betf4_zuz6_wh** | [**AicusapicodBETf4Zuz6WH**](AicusapicodBETf4Zuz6WH.md)| | ### Return type -[**Aicusapicoh5w3FRwxBjhG**](Aicusapicoh5w3FRwxBjhG.md) +[**Aicusapico4LPAf103DGIi**](Aicusapico4LPAf103DGIi.md) ### Authorization @@ -2911,9 +2911,9 @@ Name | Type | Description | Notes | Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) 
@@ -2930,10 +2930,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -2988,10 +2988,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -3047,10 +3047,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -3100,9 +3100,9 @@ This endpoint does not need any parameter. 
| Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) @@ -3120,10 +3120,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -3173,9 +3173,9 @@ This endpoint does not need any parameter. | Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) @@ -3192,10 +3192,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -3250,10 +3250,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -3309,10 +3309,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -3362,9 +3362,9 @@ This endpoint does not need any parameter. 
| Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) @@ -3381,10 +3381,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -3440,10 +3440,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -3493,9 +3493,9 @@ This endpoint does not need any parameter. 
| Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) @@ -3513,10 +3513,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) # The client must configure the authentication and authorization parameters @@ -3566,9 +3566,9 @@ This endpoint does not need any parameter. | Status code | Description | Response headers | |-------------|-------------|------------------| +**200** | 200 response | - | **400** | 400 response | - | **500** | 500 response | - | -**200** | 200 response | - | [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) @@ -3585,10 +3585,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. 
configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) @@ -3643,10 +3643,10 @@ import openapi_client from openapi_client.rest import ApiException from pprint import pprint -# Defining the host is optional and defaults to https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod +# Defining the host is optional and defaults to https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod # See configuration.py for a list of all supported configuration parameters. configuration = openapi_client.Configuration( - host = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" + host = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" ) diff --git a/api_test/biz_logic/rest_api/openapi_client/__init__.py b/api_test/biz_logic/rest_api/openapi_client/__init__.py index 54840b84e..4d53e8f56 100644 --- a/api_test/biz_logic/rest_api/openapi_client/__init__.py +++ b/api_test/biz_logic/rest_api/openapi_client/__init__.py @@ -7,7 +7,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
@@ -15,11 +15,9 @@ __version__ = "1.0.0" - import sys import os -# 获取 openapi_client 目录的路径 -openapi_client_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../biz_logic/rest_api')) +openapi_client_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../biz_logic/rest_api")) sys.path.insert(0, openapi_client_path) # import apis into sdk package @@ -37,29 +35,30 @@ from openapi_client.exceptions import ApiException # import models into sdk package -from openapi_client.models.aicusapico2_twv_xbhs_tncy import Aicusapico2TwvXbhsTncy -from openapi_client.models.aicusapico2_twv_xbhs_tncy_config import Aicusapico2TwvXbhsTncyConfig -from openapi_client.models.aicusapico2_twv_xbhs_tncy_items_inner import Aicusapico2TwvXbhsTncyItemsInner -from openapi_client.models.aicusapico35klz_y80ik_ph import Aicusapico35klzY80ikPh -from openapi_client.models.aicusapico35klz_y80ik_ph_items_inner import Aicusapico35klzY80ikPhItemsInner -from openapi_client.models.aicusapico35klz_y80ik_ph_items_inner_qa_list_inner import Aicusapico35klzY80ikPhItemsInnerQAListInner -from openapi_client.models.aicusapico4rw_mspze_boe5 import Aicusapico4rwMspzeBOe5 -from openapi_client.models.aicusapico4rw_mspze_boe5_input_payload import Aicusapico4rwMspzeBOe5InputPayload -from openapi_client.models.aicusapico51_raf_cay_oxi_z import Aicusapico51RafCAYOxiZ -from openapi_client.models.aicusapico_cyd129_m65y_kv import AicusapicoCyd129M65yKV -from openapi_client.models.aicusapico_eoc_l_nul8cwxa import AicusapicoEOcLNul8cwxa -from openapi_client.models.aicusapico_qjco_kzz_zfi86 import AicusapicoQjcoKzzZFI86 -from openapi_client.models.aicusapico_qjco_kzz_zfi86_items_inner import AicusapicoQjcoKzzZFI86ItemsInner -from openapi_client.models.aicusapico_uy1_ybxi_wj5_aq import AicusapicoUy1YBXiWJ5Aq -from openapi_client.models.aicusapicob9jx_gq8zv1_as import Aicusapicob9jxGQ8zv1AS -from openapi_client.models.aicusapicob9jx_gq8zv1_as_items_inner import 
Aicusapicob9jxGQ8zv1ASItemsInner -from openapi_client.models.aicusapicob_mn2p_lk9_av_e8 import AicusapicobMN2pLK9AvE8 -from openapi_client.models.aicusapicob_mn2p_lk9_av_e8_index import AicusapicobMN2pLK9AvE8Index -from openapi_client.models.aicusapicoh5w3_f_rwx_bjh_g import Aicusapicoh5w3FRwxBjhG -from openapi_client.models.aicusapicoh_qb_fv37cvt_qs import AicusapicohQbFv37cvtQS -from openapi_client.models.aicusapicoh_qb_fv37cvt_qs_index_ids import AicusapicohQbFv37cvtQSIndexIds -from openapi_client.models.aicusapicoi_x_uam8_n8_dh8l import AicusapicoiXUam8N8Dh8l -from openapi_client.models.aicusapicoi_x_uam8_n8_dh8l_items_inner import AicusapicoiXUam8N8Dh8lItemsInner -from openapi_client.models.aicusapicoqew7t5v_ta2ak import Aicusapicoqew7t5vTA2ak -from openapi_client.models.aicusapicor1_kt5_c2m_lnkm import Aicusapicor1Kt5C2mLnkm -from openapi_client.models.aicusapicose_oar_xmrp_sns import AicusapicoseOArXMRpSNs +from openapi_client.models.aicusapico2ey_mrt6use_ql import Aicusapico2eyMRt6useQL +from openapi_client.models.aicusapico4_lpaf103_dgii import Aicusapico4LPAf103DGIi +from openapi_client.models.aicusapico4_lpaf103_dgii_data import Aicusapico4LPAf103DGIiData +from openapi_client.models.aicusapico5_ob_tetko9o_mo import Aicusapico5ObTetko9oMO +from openapi_client.models.aicusapico5_ob_tetko9o_mo_items_inner import Aicusapico5ObTetko9oMOItemsInner +from openapi_client.models.aicusapico_dpw375iu4xb1 import AicusapicoDPw375iu4xb1 +from openapi_client.models.aicusapico_h_wyv_bn_b1_qgg_i import AicusapicoHWyvBnB1QggI +from openapi_client.models.aicusapico_h_wyv_bn_b1_qgg_i_config import AicusapicoHWyvBnB1QggIConfig +from openapi_client.models.aicusapico_h_wyv_bn_b1_qgg_i_items_inner import AicusapicoHWyvBnB1QggIItemsInner +from openapi_client.models.aicusapico_k_utg5hw5_mq23 import AicusapicoKUtg5hw5MQ23 +from openapi_client.models.aicusapico_npq1_tceem_sd8 import AicusapicoNPq1TceemSd8 +from openapi_client.models.aicusapico_npq1_tceem_sd8_items_inner import 
AicusapicoNPq1TceemSd8ItemsInner +from openapi_client.models.aicusapico_ozq0ul_og1nr_k import AicusapicoOzq0ulOG1nrK +from openapi_client.models.aicusapico_ozq0ul_og1nr_k_input_payload import AicusapicoOzq0ulOG1nrKInputPayload +from openapi_client.models.aicusapico_ta_aup0_rj_hhq0 import AicusapicoTaAUp0RjHHQ0 +from openapi_client.models.aicusapico_ya9_vor_uqinz_f import AicusapicoYa9VOrUQINzF +from openapi_client.models.aicusapico_ya9_vor_uqinz_f_items_inner import AicusapicoYa9VOrUQINzFItemsInner +from openapi_client.models.aicusapico_ze_nx832z_hfgx import AicusapicoZeNx832zHfgx +from openapi_client.models.aicusapicoa_oeh_yyqx8ql_r import AicusapicoaOehYyqx8qlR +from openapi_client.models.aicusapicoa_oeh_yyqx8ql_r_index_ids import AicusapicoaOehYyqx8qlRIndexIds +from openapi_client.models.aicusapicod_betf4_zuz6_wh import AicusapicodBETf4Zuz6WH +from openapi_client.models.aicusapicoqh_vw_ter_avpqm import AicusapicoqhVwTerAVPQm +from openapi_client.models.aicusapicoqh_vw_ter_avpqm_items_inner import AicusapicoqhVwTerAVPQmItemsInner +from openapi_client.models.aicusapicoqh_vw_ter_avpqm_items_inner_qa_list_inner import AicusapicoqhVwTerAVPQmItemsInnerQAListInner +from openapi_client.models.aicusapicou6_vks_roj90h2 import Aicusapicou6VksROJ90h2 +from openapi_client.models.aicusapicou6_vks_roj90h2_index import Aicusapicou6VksROJ90h2Index +from openapi_client.models.aicusapicoyip3e_ubuk13_z import Aicusapicoyip3eUBUK13Z diff --git a/api_test/biz_logic/rest_api/openapi_client/api/default_api.py b/api_test/biz_logic/rest_api/openapi_client/api/default_api.py index 59b4c90bb..464121a72 100644 --- a/api_test/biz_logic/rest_api/openapi_client/api/default_api.py +++ b/api_test/biz_logic/rest_api/openapi_client/api/default_api.py @@ -5,38 +5,35 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the 
class manually. """ # noqa: E501 import warnings -# from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt from typing import Any, Dict, List, Optional, Tuple, Union - -from pydantic import StrictFloat, StrictStr, StrictInt, validate_call, BaseModel, Field -# from pydantic.aliases import Field from typing_extensions import Annotated -# from pydantic import StrictStr +from pydantic import StrictStr from typing import Any, Dict, Optional -from openapi_client.models.aicusapico2_twv_xbhs_tncy import Aicusapico2TwvXbhsTncy -from openapi_client.models.aicusapico35klz_y80ik_ph import Aicusapico35klzY80ikPh -from openapi_client.models.aicusapico4rw_mspze_boe5 import Aicusapico4rwMspzeBOe5 -from openapi_client.models.aicusapico51_raf_cay_oxi_z import Aicusapico51RafCAYOxiZ -from openapi_client.models.aicusapico_cyd129_m65y_kv import AicusapicoCyd129M65yKV -from openapi_client.models.aicusapico_eoc_l_nul8cwxa import AicusapicoEOcLNul8cwxa -from openapi_client.models.aicusapico_qjco_kzz_zfi86 import AicusapicoQjcoKzzZFI86 -from openapi_client.models.aicusapico_uy1_ybxi_wj5_aq import AicusapicoUy1YBXiWJ5Aq -from openapi_client.models.aicusapicob9jx_gq8zv1_as import Aicusapicob9jxGQ8zv1AS -from openapi_client.models.aicusapicob_mn2p_lk9_av_e8 import AicusapicobMN2pLK9AvE8 -from openapi_client.models.aicusapicoh5w3_f_rwx_bjh_g import Aicusapicoh5w3FRwxBjhG -from openapi_client.models.aicusapicoh_qb_fv37cvt_qs import AicusapicohQbFv37cvtQS -from openapi_client.models.aicusapicoi_x_uam8_n8_dh8l import AicusapicoiXUam8N8Dh8l -from openapi_client.models.aicusapicoqew7t5v_ta2ak import Aicusapicoqew7t5vTA2ak -from openapi_client.models.aicusapicor1_kt5_c2m_lnkm import Aicusapicor1Kt5C2mLnkm -from openapi_client.models.aicusapicose_oar_xmrp_sns import AicusapicoseOArXMRpSNs +from openapi_client.models.aicusapico2ey_mrt6use_ql import Aicusapico2eyMRt6useQL +from 
openapi_client.models.aicusapico4_lpaf103_dgii import Aicusapico4LPAf103DGIi +from openapi_client.models.aicusapico5_ob_tetko9o_mo import Aicusapico5ObTetko9oMO +from openapi_client.models.aicusapico_dpw375iu4xb1 import AicusapicoDPw375iu4xb1 +from openapi_client.models.aicusapico_h_wyv_bn_b1_qgg_i import AicusapicoHWyvBnB1QggI +from openapi_client.models.aicusapico_k_utg5hw5_mq23 import AicusapicoKUtg5hw5MQ23 +from openapi_client.models.aicusapico_npq1_tceem_sd8 import AicusapicoNPq1TceemSd8 +from openapi_client.models.aicusapico_ozq0ul_og1nr_k import AicusapicoOzq0ulOG1nrK +from openapi_client.models.aicusapico_ta_aup0_rj_hhq0 import AicusapicoTaAUp0RjHHQ0 +from openapi_client.models.aicusapico_ya9_vor_uqinz_f import AicusapicoYa9VOrUQINzF +from openapi_client.models.aicusapico_ze_nx832z_hfgx import AicusapicoZeNx832zHfgx +from openapi_client.models.aicusapicoa_oeh_yyqx8ql_r import AicusapicoaOehYyqx8qlR +from openapi_client.models.aicusapicod_betf4_zuz6_wh import AicusapicodBETf4Zuz6WH +from openapi_client.models.aicusapicoqh_vw_ter_avpqm import AicusapicoqhVwTerAVPQm +from openapi_client.models.aicusapicou6_vks_roj90h2 import Aicusapicou6VksROJ90h2 +from openapi_client.models.aicusapicoyip3e_ubuk13_z import Aicusapicoyip3eUBUK13Z from openapi_client.api_client import ApiClient, RequestSerialized from openapi_client.api_response import ApiResponse @@ -105,9 +102,9 @@ def aos_get( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -169,9 +166,9 @@ def aos_get_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -233,9 +230,9 @@ def aos_get_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } 
response_data = self.api_client.call_api( *_param, @@ -584,9 +581,9 @@ def aos_post( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -648,9 +645,9 @@ def aos_post_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -712,9 +709,9 @@ def aos_post_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -830,9 +827,9 @@ def chat_history_messages_get( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -894,9 +891,9 @@ def chat_history_messages_get_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -958,9 +955,9 @@ def chat_history_messages_get_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -1542,9 +1539,9 @@ def chat_history_post( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -1606,9 +1603,9 @@ def chat_history_post_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -1670,9 +1667,9 @@ def chat_history_post_without_preload_content( ) _response_types_map: 
Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -1788,9 +1785,9 @@ def chat_history_sessions_get( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -1852,9 +1849,9 @@ def chat_history_sessions_get_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -1916,9 +1913,9 @@ def chat_history_sessions_get_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -2235,7 +2232,7 @@ def chatbot_management_chatbots_get( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> AicusapicoiXUam8N8Dh8l: + ) -> AicusapicoYa9VOrUQINzF: """chatbot_management_chatbots_get @@ -2275,9 +2272,9 @@ def chatbot_management_chatbots_get( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "AicusapicoYa9VOrUQINzF", '400': "object", '500': "object", - '200': "AicusapicoiXUam8N8Dh8l", } response_data = self.api_client.call_api( *_param, @@ -2307,7 +2304,7 @@ def chatbot_management_chatbots_get_with_http_info( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[AicusapicoiXUam8N8Dh8l]: + ) -> ApiResponse[AicusapicoYa9VOrUQINzF]: """chatbot_management_chatbots_get @@ -2347,9 +2344,9 @@ def chatbot_management_chatbots_get_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "AicusapicoYa9VOrUQINzF", '400': "object", '500': "object", 
- '200': "AicusapicoiXUam8N8Dh8l", } response_data = self.api_client.call_api( *_param, @@ -2419,9 +2416,9 @@ def chatbot_management_chatbots_get_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "AicusapicoYa9VOrUQINzF", '400': "object", '500': "object", - '200': "AicusapicoiXUam8N8Dh8l", } response_data = self.api_client.call_api( *_param, @@ -2734,7 +2731,7 @@ def _chatbot_management_chatbots_options_serialize( @validate_call def chatbot_management_chatbots_post( self, - aicusapicob_mn2p_lk9_av_e8: AicusapicobMN2pLK9AvE8, + aicusapicou6_vks_roj90h2: Aicusapicou6VksROJ90h2, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -2747,12 +2744,12 @@ def chatbot_management_chatbots_post( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> AicusapicohQbFv37cvtQS: + ) -> AicusapicoaOehYyqx8qlR: """chatbot_management_chatbots_post - :param aicusapicob_mn2p_lk9_av_e8: (required) - :type aicusapicob_mn2p_lk9_av_e8: AicusapicobMN2pLK9AvE8 + :param aicusapicou6_vks_roj90h2: (required) + :type aicusapicou6_vks_roj90h2: Aicusapicou6VksROJ90h2 :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -2776,7 +2773,7 @@ def chatbot_management_chatbots_post( """ # noqa: E501 _param = self._chatbot_management_chatbots_post_serialize( - aicusapicob_mn2p_lk9_av_e8=aicusapicob_mn2p_lk9_av_e8, + aicusapicou6_vks_roj90h2=aicusapicou6_vks_roj90h2, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -2784,9 +2781,9 @@ def chatbot_management_chatbots_post( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "AicusapicoaOehYyqx8qlR", '400': "object", '500': "object", - '200': "AicusapicohQbFv37cvtQS", } response_data = self.api_client.call_api( *_param, @@ -2802,7 +2799,7 @@ def chatbot_management_chatbots_post( @validate_call def chatbot_management_chatbots_post_with_http_info( self, - aicusapicob_mn2p_lk9_av_e8: AicusapicobMN2pLK9AvE8, + aicusapicou6_vks_roj90h2: Aicusapicou6VksROJ90h2, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -2815,12 +2812,12 @@ def chatbot_management_chatbots_post_with_http_info( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[AicusapicohQbFv37cvtQS]: + ) -> ApiResponse[AicusapicoaOehYyqx8qlR]: """chatbot_management_chatbots_post - :param aicusapicob_mn2p_lk9_av_e8: (required) - :type aicusapicob_mn2p_lk9_av_e8: AicusapicobMN2pLK9AvE8 + :param aicusapicou6_vks_roj90h2: (required) + :type aicusapicou6_vks_roj90h2: Aicusapicou6VksROJ90h2 :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -2844,7 +2841,7 @@ def chatbot_management_chatbots_post_with_http_info( """ # noqa: E501 _param = self._chatbot_management_chatbots_post_serialize( - aicusapicob_mn2p_lk9_av_e8=aicusapicob_mn2p_lk9_av_e8, + aicusapicou6_vks_roj90h2=aicusapicou6_vks_roj90h2, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -2852,9 +2849,9 @@ def chatbot_management_chatbots_post_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "AicusapicoaOehYyqx8qlR", '400': "object", '500': "object", - '200': "AicusapicohQbFv37cvtQS", } response_data = self.api_client.call_api( *_param, @@ -2870,7 +2867,7 @@ def chatbot_management_chatbots_post_with_http_info( @validate_call def chatbot_management_chatbots_post_without_preload_content( self, - aicusapicob_mn2p_lk9_av_e8: AicusapicobMN2pLK9AvE8, + aicusapicou6_vks_roj90h2: Aicusapicou6VksROJ90h2, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -2887,8 +2884,8 @@ def chatbot_management_chatbots_post_without_preload_content( """chatbot_management_chatbots_post - :param aicusapicob_mn2p_lk9_av_e8: (required) - :type aicusapicob_mn2p_lk9_av_e8: AicusapicobMN2pLK9AvE8 + :param aicusapicou6_vks_roj90h2: (required) + :type aicusapicou6_vks_roj90h2: Aicusapicou6VksROJ90h2 :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -2912,7 +2909,7 @@ def chatbot_management_chatbots_post_without_preload_content( """ # noqa: E501 _param = self._chatbot_management_chatbots_post_serialize( - aicusapicob_mn2p_lk9_av_e8=aicusapicob_mn2p_lk9_av_e8, + aicusapicou6_vks_roj90h2=aicusapicou6_vks_roj90h2, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -2920,9 +2917,9 @@ def chatbot_management_chatbots_post_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "AicusapicoaOehYyqx8qlR", '400': "object", '500': "object", - '200': "AicusapicohQbFv37cvtQS", } response_data = self.api_client.call_api( *_param, @@ -2933,7 +2930,7 @@ def chatbot_management_chatbots_post_without_preload_content( def _chatbot_management_chatbots_post_serialize( self, - aicusapicob_mn2p_lk9_av_e8, + aicusapicou6_vks_roj90h2, _request_auth, _content_type, _headers, @@ -2957,8 +2954,8 @@ def _chatbot_management_chatbots_post_serialize( # process the header parameters # process the form parameters # process the body parameter - if aicusapicob_mn2p_lk9_av_e8 is not None: - _body_params = aicusapicob_mn2p_lk9_av_e8 + if aicusapicou6_vks_roj90h2 is not None: + _body_params = aicusapicou6_vks_roj90h2 # set the HTTP header `Accept` @@ -3287,9 +3284,9 @@ def chatbot_management_check_chatbot_post( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -3351,9 +3348,9 @@ def chatbot_management_check_chatbot_post_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -3415,9 +3412,9 @@ def chatbot_management_check_chatbot_post_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } 
response_data = self.api_client.call_api( *_param, @@ -3533,9 +3530,9 @@ def chatbot_management_check_default_chatbot_get( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -3597,9 +3594,9 @@ def chatbot_management_check_default_chatbot_get_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -3661,9 +3658,9 @@ def chatbot_management_check_default_chatbot_get_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -4012,9 +4009,9 @@ def chatbot_management_embeddings_get( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -4076,9 +4073,9 @@ def chatbot_management_embeddings_get_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -4140,9 +4137,9 @@ def chatbot_management_embeddings_get_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -4957,9 +4954,9 @@ def extract_post( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -5021,9 +5018,9 @@ def extract_post_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } 
response_data = self.api_client.call_api( *_param, @@ -5085,9 +5082,9 @@ def extract_post_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -5203,9 +5200,9 @@ def intention_download_template_get( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -5267,9 +5264,9 @@ def intention_download_template_get_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -5331,9 +5328,9 @@ def intention_download_template_get_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -5869,7 +5866,7 @@ def _intention_execution_presigned_url_options_serialize( @validate_call def intention_execution_presigned_url_post( self, - aicusapico_cyd129_m65y_kv: AicusapicoCyd129M65yKV, + aicusapico2ey_mrt6use_ql: Aicusapico2eyMRt6useQL, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -5882,12 +5879,12 @@ def intention_execution_presigned_url_post( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> AicusapicoUy1YBXiWJ5Aq: + ) -> AicusapicoKUtg5hw5MQ23: """intention_execution_presigned_url_post - :param aicusapico_cyd129_m65y_kv: (required) - :type aicusapico_cyd129_m65y_kv: AicusapicoCyd129M65yKV + :param aicusapico2ey_mrt6use_ql: (required) + :type aicusapico2ey_mrt6use_ql: Aicusapico2eyMRt6useQL :param _request_timeout: timeout setting for this request. 
If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -5911,7 +5908,7 @@ def intention_execution_presigned_url_post( """ # noqa: E501 _param = self._intention_execution_presigned_url_post_serialize( - aicusapico_cyd129_m65y_kv=aicusapico_cyd129_m65y_kv, + aicusapico2ey_mrt6use_ql=aicusapico2ey_mrt6use_ql, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -5919,9 +5916,9 @@ def intention_execution_presigned_url_post( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "AicusapicoKUtg5hw5MQ23", '400': "object", '500': "object", - '200': "AicusapicoUy1YBXiWJ5Aq", } response_data = self.api_client.call_api( *_param, @@ -5937,7 +5934,7 @@ def intention_execution_presigned_url_post( @validate_call def intention_execution_presigned_url_post_with_http_info( self, - aicusapico_cyd129_m65y_kv: AicusapicoCyd129M65yKV, + aicusapico2ey_mrt6use_ql: Aicusapico2eyMRt6useQL, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -5950,12 +5947,12 @@ def intention_execution_presigned_url_post_with_http_info( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[AicusapicoUy1YBXiWJ5Aq]: + ) -> ApiResponse[AicusapicoKUtg5hw5MQ23]: """intention_execution_presigned_url_post - :param aicusapico_cyd129_m65y_kv: (required) - :type aicusapico_cyd129_m65y_kv: AicusapicoCyd129M65yKV + :param aicusapico2ey_mrt6use_ql: (required) + :type aicusapico2ey_mrt6use_ql: Aicusapico2eyMRt6useQL :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -5979,7 +5976,7 @@ def intention_execution_presigned_url_post_with_http_info( """ # noqa: E501 _param = self._intention_execution_presigned_url_post_serialize( - aicusapico_cyd129_m65y_kv=aicusapico_cyd129_m65y_kv, + aicusapico2ey_mrt6use_ql=aicusapico2ey_mrt6use_ql, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -5987,9 +5984,9 @@ def intention_execution_presigned_url_post_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "AicusapicoKUtg5hw5MQ23", '400': "object", '500': "object", - '200': "AicusapicoUy1YBXiWJ5Aq", } response_data = self.api_client.call_api( *_param, @@ -6005,7 +6002,7 @@ def intention_execution_presigned_url_post_with_http_info( @validate_call def intention_execution_presigned_url_post_without_preload_content( self, - aicusapico_cyd129_m65y_kv: AicusapicoCyd129M65yKV, + aicusapico2ey_mrt6use_ql: Aicusapico2eyMRt6useQL, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -6022,8 +6019,8 @@ def intention_execution_presigned_url_post_without_preload_content( """intention_execution_presigned_url_post - :param aicusapico_cyd129_m65y_kv: (required) - :type aicusapico_cyd129_m65y_kv: AicusapicoCyd129M65yKV + :param aicusapico2ey_mrt6use_ql: (required) + :type aicusapico2ey_mrt6use_ql: Aicusapico2eyMRt6useQL :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -6047,7 +6044,7 @@ def intention_execution_presigned_url_post_without_preload_content( """ # noqa: E501 _param = self._intention_execution_presigned_url_post_serialize( - aicusapico_cyd129_m65y_kv=aicusapico_cyd129_m65y_kv, + aicusapico2ey_mrt6use_ql=aicusapico2ey_mrt6use_ql, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -6055,9 +6052,9 @@ def intention_execution_presigned_url_post_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "AicusapicoKUtg5hw5MQ23", '400': "object", '500': "object", - '200': "AicusapicoUy1YBXiWJ5Aq", } response_data = self.api_client.call_api( *_param, @@ -6068,7 +6065,7 @@ def intention_execution_presigned_url_post_without_preload_content( def _intention_execution_presigned_url_post_serialize( self, - aicusapico_cyd129_m65y_kv, + aicusapico2ey_mrt6use_ql, _request_auth, _content_type, _headers, @@ -6092,8 +6089,8 @@ def _intention_execution_presigned_url_post_serialize( # process the header parameters # process the form parameters # process the body parameter - if aicusapico_cyd129_m65y_kv is not None: - _body_params = aicusapico_cyd129_m65y_kv + if aicusapico2ey_mrt6use_ql is not None: + _body_params = aicusapico2ey_mrt6use_ql # set the HTTP header `Accept` @@ -6189,9 +6186,9 @@ def intention_executions_delete( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -6253,9 +6250,9 @@ def intention_executions_delete_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -6317,9 +6314,9 @@ def intention_executions_delete_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } 
response_data = self.api_client.call_api( *_param, @@ -6403,7 +6400,7 @@ def intention_executions_execution_id_get( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> Aicusapico35klzY80ikPh: + ) -> AicusapicoqhVwTerAVPQm: """intention_executions_execution_id_get @@ -6443,9 +6440,9 @@ def intention_executions_execution_id_get( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "AicusapicoqhVwTerAVPQm", '400': "object", '500': "object", - '200': "Aicusapico35klzY80ikPh", } response_data = self.api_client.call_api( *_param, @@ -6475,7 +6472,7 @@ def intention_executions_execution_id_get_with_http_info( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[Aicusapico35klzY80ikPh]: + ) -> ApiResponse[AicusapicoqhVwTerAVPQm]: """intention_executions_execution_id_get @@ -6515,9 +6512,9 @@ def intention_executions_execution_id_get_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "AicusapicoqhVwTerAVPQm", '400': "object", '500': "object", - '200': "Aicusapico35klzY80ikPh", } response_data = self.api_client.call_api( *_param, @@ -6587,9 +6584,9 @@ def intention_executions_execution_id_get_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "AicusapicoqhVwTerAVPQm", '400': "object", '500': "object", - '200': "Aicusapico35klzY80ikPh", } response_data = self.api_client.call_api( *_param, @@ -6927,7 +6924,7 @@ def intention_executions_get( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> Aicusapicob9jxGQ8zv1AS: + ) -> AicusapicoHWyvBnB1QggI: """intention_executions_get @@ -6967,9 +6964,9 @@ def intention_executions_get( ) _response_types_map: Dict[str, Optional[str]] = { + '200': 
"AicusapicoHWyvBnB1QggI", '400': "object", '500': "object", - '200': "Aicusapicob9jxGQ8zv1AS", } response_data = self.api_client.call_api( *_param, @@ -6999,7 +6996,7 @@ def intention_executions_get_with_http_info( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[Aicusapicob9jxGQ8zv1AS]: + ) -> ApiResponse[AicusapicoHWyvBnB1QggI]: """intention_executions_get @@ -7039,9 +7036,9 @@ def intention_executions_get_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "AicusapicoHWyvBnB1QggI", '400': "object", '500': "object", - '200': "Aicusapicob9jxGQ8zv1AS", } response_data = self.api_client.call_api( *_param, @@ -7111,9 +7108,9 @@ def intention_executions_get_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "AicusapicoHWyvBnB1QggI", '400': "object", '500': "object", - '200': "Aicusapicob9jxGQ8zv1AS", } response_data = self.api_client.call_api( *_param, @@ -7426,7 +7423,7 @@ def _intention_executions_options_serialize( @validate_call def intention_executions_post( self, - aicusapico_eoc_l_nul8cwxa: AicusapicoEOcLNul8cwxa, + aicusapico_ze_nx832z_hfgx: AicusapicoZeNx832zHfgx, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -7439,12 +7436,12 @@ def intention_executions_post( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> Aicusapico4rwMspzeBOe5: + ) -> AicusapicoOzq0ulOG1nrK: """intention_executions_post - :param aicusapico_eoc_l_nul8cwxa: (required) - :type aicusapico_eoc_l_nul8cwxa: AicusapicoEOcLNul8cwxa + :param aicusapico_ze_nx832z_hfgx: (required) + :type aicusapico_ze_nx832z_hfgx: AicusapicoZeNx832zHfgx :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -7468,7 +7465,7 @@ def intention_executions_post( """ # noqa: E501 _param = self._intention_executions_post_serialize( - aicusapico_eoc_l_nul8cwxa=aicusapico_eoc_l_nul8cwxa, + aicusapico_ze_nx832z_hfgx=aicusapico_ze_nx832z_hfgx, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -7476,9 +7473,9 @@ def intention_executions_post( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "AicusapicoOzq0ulOG1nrK", '400': "object", '500': "object", - '200': "Aicusapico4rwMspzeBOe5", } response_data = self.api_client.call_api( *_param, @@ -7494,7 +7491,7 @@ def intention_executions_post( @validate_call def intention_executions_post_with_http_info( self, - aicusapico_eoc_l_nul8cwxa: AicusapicoEOcLNul8cwxa, + aicusapico_ze_nx832z_hfgx: AicusapicoZeNx832zHfgx, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -7507,12 +7504,12 @@ def intention_executions_post_with_http_info( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[Aicusapico4rwMspzeBOe5]: + ) -> ApiResponse[AicusapicoOzq0ulOG1nrK]: """intention_executions_post - :param aicusapico_eoc_l_nul8cwxa: (required) - :type aicusapico_eoc_l_nul8cwxa: AicusapicoEOcLNul8cwxa + :param aicusapico_ze_nx832z_hfgx: (required) + :type aicusapico_ze_nx832z_hfgx: AicusapicoZeNx832zHfgx :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -7536,7 +7533,7 @@ def intention_executions_post_with_http_info( """ # noqa: E501 _param = self._intention_executions_post_serialize( - aicusapico_eoc_l_nul8cwxa=aicusapico_eoc_l_nul8cwxa, + aicusapico_ze_nx832z_hfgx=aicusapico_ze_nx832z_hfgx, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -7544,9 +7541,9 @@ def intention_executions_post_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "AicusapicoOzq0ulOG1nrK", '400': "object", '500': "object", - '200': "Aicusapico4rwMspzeBOe5", } response_data = self.api_client.call_api( *_param, @@ -7562,7 +7559,7 @@ def intention_executions_post_with_http_info( @validate_call def intention_executions_post_without_preload_content( self, - aicusapico_eoc_l_nul8cwxa: AicusapicoEOcLNul8cwxa, + aicusapico_ze_nx832z_hfgx: AicusapicoZeNx832zHfgx, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -7579,8 +7576,8 @@ def intention_executions_post_without_preload_content( """intention_executions_post - :param aicusapico_eoc_l_nul8cwxa: (required) - :type aicusapico_eoc_l_nul8cwxa: AicusapicoEOcLNul8cwxa + :param aicusapico_ze_nx832z_hfgx: (required) + :type aicusapico_ze_nx832z_hfgx: AicusapicoZeNx832zHfgx :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -7604,7 +7601,7 @@ def intention_executions_post_without_preload_content( """ # noqa: E501 _param = self._intention_executions_post_serialize( - aicusapico_eoc_l_nul8cwxa=aicusapico_eoc_l_nul8cwxa, + aicusapico_ze_nx832z_hfgx=aicusapico_ze_nx832z_hfgx, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -7612,9 +7609,9 @@ def intention_executions_post_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "AicusapicoOzq0ulOG1nrK", '400': "object", '500': "object", - '200': "Aicusapico4rwMspzeBOe5", } response_data = self.api_client.call_api( *_param, @@ -7625,7 +7622,7 @@ def intention_executions_post_without_preload_content( def _intention_executions_post_serialize( self, - aicusapico_eoc_l_nul8cwxa, + aicusapico_ze_nx832z_hfgx, _request_auth, _content_type, _headers, @@ -7649,8 +7646,8 @@ def _intention_executions_post_serialize( # process the header parameters # process the form parameters # process the body parameter - if aicusapico_eoc_l_nul8cwxa is not None: - _body_params = aicusapico_eoc_l_nul8cwxa + if aicusapico_ze_nx832z_hfgx is not None: + _body_params = aicusapico_ze_nx832z_hfgx # set the HTTP header `Accept` @@ -7979,9 +7976,9 @@ def intention_index_used_scan_post( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -8043,9 +8040,9 @@ def intention_index_used_scan_post_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -8107,9 +8104,9 @@ def intention_index_used_scan_post_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -8412,7 +8409,7 
@@ def _intention_options_serialize( @validate_call def knowledge_base_executions_delete( self, - aicusapicor1_kt5_c2m_lnkm: Aicusapicor1Kt5C2mLnkm, + aicusapico_dpw375iu4xb1: AicusapicoDPw375iu4xb1, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -8425,12 +8422,12 @@ def knowledge_base_executions_delete( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> Aicusapicoqew7t5vTA2ak: + ) -> Aicusapicoyip3eUBUK13Z: """knowledge_base_executions_delete - :param aicusapicor1_kt5_c2m_lnkm: (required) - :type aicusapicor1_kt5_c2m_lnkm: Aicusapicor1Kt5C2mLnkm + :param aicusapico_dpw375iu4xb1: (required) + :type aicusapico_dpw375iu4xb1: AicusapicoDPw375iu4xb1 :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -8454,7 +8451,7 @@ def knowledge_base_executions_delete( """ # noqa: E501 _param = self._knowledge_base_executions_delete_serialize( - aicusapicor1_kt5_c2m_lnkm=aicusapicor1_kt5_c2m_lnkm, + aicusapico_dpw375iu4xb1=aicusapico_dpw375iu4xb1, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -8462,9 +8459,9 @@ def knowledge_base_executions_delete( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "Aicusapicoyip3eUBUK13Z", '400': "object", '500': "object", - '200': "Aicusapicoqew7t5vTA2ak", } response_data = self.api_client.call_api( *_param, @@ -8480,7 +8477,7 @@ def knowledge_base_executions_delete( @validate_call def knowledge_base_executions_delete_with_http_info( self, - aicusapicor1_kt5_c2m_lnkm: Aicusapicor1Kt5C2mLnkm, + aicusapico_dpw375iu4xb1: AicusapicoDPw375iu4xb1, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -8493,12 +8490,12 @@ def knowledge_base_executions_delete_with_http_info( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, 
_host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[Aicusapicoqew7t5vTA2ak]: + ) -> ApiResponse[Aicusapicoyip3eUBUK13Z]: """knowledge_base_executions_delete - :param aicusapicor1_kt5_c2m_lnkm: (required) - :type aicusapicor1_kt5_c2m_lnkm: Aicusapicor1Kt5C2mLnkm + :param aicusapico_dpw375iu4xb1: (required) + :type aicusapico_dpw375iu4xb1: AicusapicoDPw375iu4xb1 :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -8522,7 +8519,7 @@ def knowledge_base_executions_delete_with_http_info( """ # noqa: E501 _param = self._knowledge_base_executions_delete_serialize( - aicusapicor1_kt5_c2m_lnkm=aicusapicor1_kt5_c2m_lnkm, + aicusapico_dpw375iu4xb1=aicusapico_dpw375iu4xb1, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -8530,9 +8527,9 @@ def knowledge_base_executions_delete_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "Aicusapicoyip3eUBUK13Z", '400': "object", '500': "object", - '200': "Aicusapicoqew7t5vTA2ak", } response_data = self.api_client.call_api( *_param, @@ -8548,7 +8545,7 @@ def knowledge_base_executions_delete_with_http_info( @validate_call def knowledge_base_executions_delete_without_preload_content( self, - aicusapicor1_kt5_c2m_lnkm: Aicusapicor1Kt5C2mLnkm, + aicusapico_dpw375iu4xb1: AicusapicoDPw375iu4xb1, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -8565,8 +8562,8 @@ def knowledge_base_executions_delete_without_preload_content( """knowledge_base_executions_delete - :param aicusapicor1_kt5_c2m_lnkm: (required) - :type aicusapicor1_kt5_c2m_lnkm: Aicusapicor1Kt5C2mLnkm + :param aicusapico_dpw375iu4xb1: (required) + :type aicusapico_dpw375iu4xb1: AicusapicoDPw375iu4xb1 :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -8590,7 +8587,7 @@ def knowledge_base_executions_delete_without_preload_content( """ # noqa: E501 _param = self._knowledge_base_executions_delete_serialize( - aicusapicor1_kt5_c2m_lnkm=aicusapicor1_kt5_c2m_lnkm, + aicusapico_dpw375iu4xb1=aicusapico_dpw375iu4xb1, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -8598,9 +8595,9 @@ def knowledge_base_executions_delete_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "Aicusapicoyip3eUBUK13Z", '400': "object", '500': "object", - '200': "Aicusapicoqew7t5vTA2ak", } response_data = self.api_client.call_api( *_param, @@ -8611,7 +8608,7 @@ def knowledge_base_executions_delete_without_preload_content( def _knowledge_base_executions_delete_serialize( self, - aicusapicor1_kt5_c2m_lnkm, + aicusapico_dpw375iu4xb1, _request_auth, _content_type, _headers, @@ -8635,8 +8632,8 @@ def _knowledge_base_executions_delete_serialize( # process the header parameters # process the form parameters # process the body parameter - if aicusapicor1_kt5_c2m_lnkm is not None: - _body_params = aicusapicor1_kt5_c2m_lnkm + if aicusapico_dpw375iu4xb1 is not None: + _body_params = aicusapico_dpw375iu4xb1 # set the HTTP header `Accept` @@ -8699,7 +8696,7 @@ def knowledge_base_executions_execution_id_get( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> AicusapicoQjcoKzzZFI86: + ) -> Aicusapico5ObTetko9oMO: """knowledge_base_executions_execution_id_get @@ -8736,9 +8733,9 @@ def knowledge_base_executions_execution_id_get( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "Aicusapico5ObTetko9oMO", '400': "object", '500': "object", - '200': "AicusapicoQjcoKzzZFI86", } response_data = self.api_client.call_api( *_param, @@ -8767,7 +8764,7 @@ def knowledge_base_executions_execution_id_get_with_http_info( _content_type: Optional[StrictStr] = 
None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[AicusapicoQjcoKzzZFI86]: + ) -> ApiResponse[Aicusapico5ObTetko9oMO]: """knowledge_base_executions_execution_id_get @@ -8804,9 +8801,9 @@ def knowledge_base_executions_execution_id_get_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "Aicusapico5ObTetko9oMO", '400': "object", '500': "object", - '200': "AicusapicoQjcoKzzZFI86", } response_data = self.api_client.call_api( *_param, @@ -8872,9 +8869,9 @@ def knowledge_base_executions_execution_id_get_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "Aicusapico5ObTetko9oMO", '400': "object", '500': "object", - '200': "AicusapicoQjcoKzzZFI86", } response_data = self.api_client.call_api( *_param, @@ -9209,7 +9206,7 @@ def knowledge_base_executions_get( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> Aicusapico2TwvXbhsTncy: + ) -> AicusapicoNPq1TceemSd8: """knowledge_base_executions_get @@ -9249,9 +9246,9 @@ def knowledge_base_executions_get( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "AicusapicoNPq1TceemSd8", '400': "object", '500': "object", - '200': "Aicusapico2TwvXbhsTncy", } response_data = self.api_client.call_api( *_param, @@ -9281,7 +9278,7 @@ def knowledge_base_executions_get_with_http_info( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[Aicusapico2TwvXbhsTncy]: + ) -> ApiResponse[AicusapicoNPq1TceemSd8]: """knowledge_base_executions_get @@ -9321,9 +9318,9 @@ def knowledge_base_executions_get_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "AicusapicoNPq1TceemSd8", '400': "object", '500': "object", - '200': "Aicusapico2TwvXbhsTncy", } response_data 
= self.api_client.call_api( *_param, @@ -9393,9 +9390,9 @@ def knowledge_base_executions_get_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "AicusapicoNPq1TceemSd8", '400': "object", '500': "object", - '200': "Aicusapico2TwvXbhsTncy", } response_data = self.api_client.call_api( *_param, @@ -9708,7 +9705,7 @@ def _knowledge_base_executions_options_serialize( @validate_call def knowledge_base_executions_post( self, - aicusapicose_oar_xmrp_sns: AicusapicoseOArXMRpSNs, + aicusapico_ta_aup0_rj_hhq0: AicusapicoTaAUp0RjHHQ0, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -9725,8 +9722,8 @@ def knowledge_base_executions_post( """knowledge_base_executions_post - :param aicusapicose_oar_xmrp_sns: (required) - :type aicusapicose_oar_xmrp_sns: AicusapicoseOArXMRpSNs + :param aicusapico_ta_aup0_rj_hhq0: (required) + :type aicusapico_ta_aup0_rj_hhq0: AicusapicoTaAUp0RjHHQ0 :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -9750,7 +9747,7 @@ def knowledge_base_executions_post( """ # noqa: E501 _param = self._knowledge_base_executions_post_serialize( - aicusapicose_oar_xmrp_sns=aicusapicose_oar_xmrp_sns, + aicusapico_ta_aup0_rj_hhq0=aicusapico_ta_aup0_rj_hhq0, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -9758,9 +9755,9 @@ def knowledge_base_executions_post( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -9776,7 +9773,7 @@ def knowledge_base_executions_post( @validate_call def knowledge_base_executions_post_with_http_info( self, - aicusapicose_oar_xmrp_sns: AicusapicoseOArXMRpSNs, + aicusapico_ta_aup0_rj_hhq0: AicusapicoTaAUp0RjHHQ0, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -9793,8 +9790,8 @@ def knowledge_base_executions_post_with_http_info( """knowledge_base_executions_post - :param aicusapicose_oar_xmrp_sns: (required) - :type aicusapicose_oar_xmrp_sns: AicusapicoseOArXMRpSNs + :param aicusapico_ta_aup0_rj_hhq0: (required) + :type aicusapico_ta_aup0_rj_hhq0: AicusapicoTaAUp0RjHHQ0 :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -9818,7 +9815,7 @@ def knowledge_base_executions_post_with_http_info( """ # noqa: E501 _param = self._knowledge_base_executions_post_serialize( - aicusapicose_oar_xmrp_sns=aicusapicose_oar_xmrp_sns, + aicusapico_ta_aup0_rj_hhq0=aicusapico_ta_aup0_rj_hhq0, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -9826,9 +9823,9 @@ def knowledge_base_executions_post_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -9844,7 +9841,7 @@ def knowledge_base_executions_post_with_http_info( @validate_call def knowledge_base_executions_post_without_preload_content( self, - aicusapicose_oar_xmrp_sns: AicusapicoseOArXMRpSNs, + aicusapico_ta_aup0_rj_hhq0: AicusapicoTaAUp0RjHHQ0, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -9861,8 +9858,8 @@ def knowledge_base_executions_post_without_preload_content( """knowledge_base_executions_post - :param aicusapicose_oar_xmrp_sns: (required) - :type aicusapicose_oar_xmrp_sns: AicusapicoseOArXMRpSNs + :param aicusapico_ta_aup0_rj_hhq0: (required) + :type aicusapico_ta_aup0_rj_hhq0: AicusapicoTaAUp0RjHHQ0 :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -9886,7 +9883,7 @@ def knowledge_base_executions_post_without_preload_content( """ # noqa: E501 _param = self._knowledge_base_executions_post_serialize( - aicusapicose_oar_xmrp_sns=aicusapicose_oar_xmrp_sns, + aicusapico_ta_aup0_rj_hhq0=aicusapico_ta_aup0_rj_hhq0, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -9894,9 +9891,9 @@ def knowledge_base_executions_post_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -9907,7 +9904,7 @@ def knowledge_base_executions_post_without_preload_content( def _knowledge_base_executions_post_serialize( self, - aicusapicose_oar_xmrp_sns, + aicusapico_ta_aup0_rj_hhq0, _request_auth, _content_type, _headers, @@ -9931,8 +9928,8 @@ def _knowledge_base_executions_post_serialize( # process the header parameters # process the form parameters # process the body parameter - if aicusapicose_oar_xmrp_sns is not None: - _body_params = aicusapicose_oar_xmrp_sns + if aicusapico_ta_aup0_rj_hhq0 is not None: + _body_params = aicusapico_ta_aup0_rj_hhq0 # set the HTTP header `Accept` @@ -10215,7 +10212,7 @@ def _knowledge_base_kb_presigned_url_options_serialize( @validate_call def knowledge_base_kb_presigned_url_post( self, - aicusapico51_raf_cay_oxi_z: Aicusapico51RafCAYOxiZ, + aicusapicod_betf4_zuz6_wh: AicusapicodBETf4Zuz6WH, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -10228,12 +10225,12 @@ def knowledge_base_kb_presigned_url_post( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> Aicusapicoh5w3FRwxBjhG: + ) -> Aicusapico4LPAf103DGIi: """knowledge_base_kb_presigned_url_post - :param aicusapico51_raf_cay_oxi_z: (required) - :type aicusapico51_raf_cay_oxi_z: Aicusapico51RafCAYOxiZ + :param 
aicusapicod_betf4_zuz6_wh: (required) + :type aicusapicod_betf4_zuz6_wh: AicusapicodBETf4Zuz6WH :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -10257,7 +10254,7 @@ def knowledge_base_kb_presigned_url_post( """ # noqa: E501 _param = self._knowledge_base_kb_presigned_url_post_serialize( - aicusapico51_raf_cay_oxi_z=aicusapico51_raf_cay_oxi_z, + aicusapicod_betf4_zuz6_wh=aicusapicod_betf4_zuz6_wh, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -10265,9 +10262,9 @@ def knowledge_base_kb_presigned_url_post( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "Aicusapico4LPAf103DGIi", '400': "object", '500': "object", - '200': "Aicusapicoh5w3FRwxBjhG", } response_data = self.api_client.call_api( *_param, @@ -10283,7 +10280,7 @@ def knowledge_base_kb_presigned_url_post( @validate_call def knowledge_base_kb_presigned_url_post_with_http_info( self, - aicusapico51_raf_cay_oxi_z: Aicusapico51RafCAYOxiZ, + aicusapicod_betf4_zuz6_wh: AicusapicodBETf4Zuz6WH, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -10296,12 +10293,12 @@ def knowledge_base_kb_presigned_url_post_with_http_info( _content_type: Optional[StrictStr] = None, _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[Aicusapicoh5w3FRwxBjhG]: + ) -> ApiResponse[Aicusapico4LPAf103DGIi]: """knowledge_base_kb_presigned_url_post - :param aicusapico51_raf_cay_oxi_z: (required) - :type aicusapico51_raf_cay_oxi_z: Aicusapico51RafCAYOxiZ + :param aicusapicod_betf4_zuz6_wh: (required) + :type aicusapicod_betf4_zuz6_wh: AicusapicodBETf4Zuz6WH :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -10325,7 +10322,7 @@ def knowledge_base_kb_presigned_url_post_with_http_info( """ # noqa: E501 _param = self._knowledge_base_kb_presigned_url_post_serialize( - aicusapico51_raf_cay_oxi_z=aicusapico51_raf_cay_oxi_z, + aicusapicod_betf4_zuz6_wh=aicusapicod_betf4_zuz6_wh, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -10333,9 +10330,9 @@ def knowledge_base_kb_presigned_url_post_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "Aicusapico4LPAf103DGIi", '400': "object", '500': "object", - '200': "Aicusapicoh5w3FRwxBjhG", } response_data = self.api_client.call_api( *_param, @@ -10351,7 +10348,7 @@ def knowledge_base_kb_presigned_url_post_with_http_info( @validate_call def knowledge_base_kb_presigned_url_post_without_preload_content( self, - aicusapico51_raf_cay_oxi_z: Aicusapico51RafCAYOxiZ, + aicusapicod_betf4_zuz6_wh: AicusapicodBETf4Zuz6WH, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -10368,8 +10365,8 @@ def knowledge_base_kb_presigned_url_post_without_preload_content( """knowledge_base_kb_presigned_url_post - :param aicusapico51_raf_cay_oxi_z: (required) - :type aicusapico51_raf_cay_oxi_z: Aicusapico51RafCAYOxiZ + :param aicusapicod_betf4_zuz6_wh: (required) + :type aicusapicod_betf4_zuz6_wh: AicusapicodBETf4Zuz6WH :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -10393,7 +10390,7 @@ def knowledge_base_kb_presigned_url_post_without_preload_content( """ # noqa: E501 _param = self._knowledge_base_kb_presigned_url_post_serialize( - aicusapico51_raf_cay_oxi_z=aicusapico51_raf_cay_oxi_z, + aicusapicod_betf4_zuz6_wh=aicusapicod_betf4_zuz6_wh, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -10401,9 +10398,9 @@ def knowledge_base_kb_presigned_url_post_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "Aicusapico4LPAf103DGIi", '400': "object", '500': "object", - '200': "Aicusapicoh5w3FRwxBjhG", } response_data = self.api_client.call_api( *_param, @@ -10414,7 +10411,7 @@ def knowledge_base_kb_presigned_url_post_without_preload_content( def _knowledge_base_kb_presigned_url_post_serialize( self, - aicusapico51_raf_cay_oxi_z, + aicusapicod_betf4_zuz6_wh, _request_auth, _content_type, _headers, @@ -10438,8 +10435,8 @@ def _knowledge_base_kb_presigned_url_post_serialize( # process the header parameters # process the form parameters # process the body parameter - if aicusapico51_raf_cay_oxi_z is not None: - _body_params = aicusapico51_raf_cay_oxi_z + if aicusapicod_betf4_zuz6_wh is not None: + _body_params = aicusapicod_betf4_zuz6_wh # set the HTTP header `Accept` @@ -11001,9 +10998,9 @@ def llm_post( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -11065,9 +11062,9 @@ def llm_post_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -11129,9 +11126,9 @@ def llm_post_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, 
@@ -11247,9 +11244,9 @@ def prompt_management_models_get( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -11311,9 +11308,9 @@ def prompt_management_models_get_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -11375,9 +11372,9 @@ def prompt_management_models_get_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -11959,9 +11956,9 @@ def prompt_management_prompts_get( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -12023,9 +12020,9 @@ def prompt_management_prompts_get_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -12087,9 +12084,9 @@ def prompt_management_prompts_get_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -12438,9 +12435,9 @@ def prompt_management_prompts_post( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -12502,9 +12499,9 @@ def prompt_management_prompts_post_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -12566,9 
+12563,9 @@ def prompt_management_prompts_post_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -12684,9 +12681,9 @@ def prompt_management_scenes_get( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -12748,9 +12745,9 @@ def prompt_management_scenes_get_with_http_info( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, @@ -12812,9 +12809,9 @@ def prompt_management_scenes_get_without_preload_content( ) _response_types_map: Dict[str, Optional[str]] = { + '200': "object", '400': "object", '500': "object", - '200': "object", } response_data = self.api_client.call_api( *_param, diff --git a/api_test/biz_logic/rest_api/openapi_client/api_client.py b/api_test/biz_logic/rest_api/openapi_client/api_client.py index 037e911fd..6bb2151d4 100644 --- a/api_test/biz_logic/rest_api/openapi_client/api_client.py +++ b/api_test/biz_logic/rest_api/openapi_client/api_client.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
diff --git a/api_test/biz_logic/rest_api/openapi_client/configuration.py b/api_test/biz_logic/rest_api/openapi_client/configuration.py index 739debbe1..dc431f873 100644 --- a/api_test/biz_logic/rest_api/openapi_client/configuration.py +++ b/api_test/biz_logic/rest_api/openapi_client/configuration.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. @@ -89,7 +89,7 @@ def __init__(self, host=None, ) -> None: """Constructor """ - self._base_path = "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/prod" if host is None else host + self._base_path = "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod" if host is None else host """Default Base url """ self.server_index = 0 if server_index is None and host is None else server_index @@ -398,7 +398,7 @@ def to_debug_report(self): return "Python SDK Debug Report:\n"\ "OS: {env}\n"\ "Python Version: {pyversion}\n"\ - "Version of the API: 2024-10-21T08:32:58Z\n"\ + "Version of the API: 2024-10-24T04:30:07Z\n"\ "SDK Package Version: 1.0.0".\ format(env=sys.platform, pyversion=sys.version) @@ -409,14 +409,8 @@ def get_host_settings(self): """ return [ { - 'url': "https://vbjlqj8va0.execute-api.us-east-1.amazonaws.com/{basePath}", + 'url': "https://c63g9uqsze.execute-api.us-east-1.amazonaws.com/prod", 'description': "No description provided", - 'variables': { - 'basePath': { - 'description': "No description provided", - 'default_value': "prod", - } - } } ] diff --git a/api_test/biz_logic/rest_api/openapi_client/exceptions.py b/api_test/biz_logic/rest_api/openapi_client/exceptions.py index 95553a825..a4ea7d2c2 100644 --- a/api_test/biz_logic/rest_api/openapi_client/exceptions.py +++ b/api_test/biz_logic/rest_api/openapi_client/exceptions.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the 
OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. diff --git a/api_test/biz_logic/rest_api/openapi_client/models/__init__.py b/api_test/biz_logic/rest_api/openapi_client/models/__init__.py index 2d63e90af..8c0c78cb1 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/__init__.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/__init__.py @@ -6,7 +6,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. @@ -14,29 +14,30 @@ # import models into model package -from openapi_client.models.aicusapico2_twv_xbhs_tncy import Aicusapico2TwvXbhsTncy -from openapi_client.models.aicusapico2_twv_xbhs_tncy_config import Aicusapico2TwvXbhsTncyConfig -from openapi_client.models.aicusapico2_twv_xbhs_tncy_items_inner import Aicusapico2TwvXbhsTncyItemsInner -from openapi_client.models.aicusapico35klz_y80ik_ph import Aicusapico35klzY80ikPh -from openapi_client.models.aicusapico35klz_y80ik_ph_items_inner import Aicusapico35klzY80ikPhItemsInner -from openapi_client.models.aicusapico35klz_y80ik_ph_items_inner_qa_list_inner import Aicusapico35klzY80ikPhItemsInnerQAListInner -from openapi_client.models.aicusapico4rw_mspze_boe5 import Aicusapico4rwMspzeBOe5 -from openapi_client.models.aicusapico4rw_mspze_boe5_input_payload import Aicusapico4rwMspzeBOe5InputPayload -from openapi_client.models.aicusapico51_raf_cay_oxi_z import Aicusapico51RafCAYOxiZ -from openapi_client.models.aicusapico_cyd129_m65y_kv import AicusapicoCyd129M65yKV -from openapi_client.models.aicusapico_eoc_l_nul8cwxa import AicusapicoEOcLNul8cwxa -from openapi_client.models.aicusapico_qjco_kzz_zfi86 import AicusapicoQjcoKzzZFI86 -from 
openapi_client.models.aicusapico_qjco_kzz_zfi86_items_inner import AicusapicoQjcoKzzZFI86ItemsInner -from openapi_client.models.aicusapico_uy1_ybxi_wj5_aq import AicusapicoUy1YBXiWJ5Aq -from openapi_client.models.aicusapicob9jx_gq8zv1_as import Aicusapicob9jxGQ8zv1AS -from openapi_client.models.aicusapicob9jx_gq8zv1_as_items_inner import Aicusapicob9jxGQ8zv1ASItemsInner -from openapi_client.models.aicusapicob_mn2p_lk9_av_e8 import AicusapicobMN2pLK9AvE8 -from openapi_client.models.aicusapicob_mn2p_lk9_av_e8_index import AicusapicobMN2pLK9AvE8Index -from openapi_client.models.aicusapicoh5w3_f_rwx_bjh_g import Aicusapicoh5w3FRwxBjhG -from openapi_client.models.aicusapicoh_qb_fv37cvt_qs import AicusapicohQbFv37cvtQS -from openapi_client.models.aicusapicoh_qb_fv37cvt_qs_index_ids import AicusapicohQbFv37cvtQSIndexIds -from openapi_client.models.aicusapicoi_x_uam8_n8_dh8l import AicusapicoiXUam8N8Dh8l -from openapi_client.models.aicusapicoi_x_uam8_n8_dh8l_items_inner import AicusapicoiXUam8N8Dh8lItemsInner -from openapi_client.models.aicusapicoqew7t5v_ta2ak import Aicusapicoqew7t5vTA2ak -from openapi_client.models.aicusapicor1_kt5_c2m_lnkm import Aicusapicor1Kt5C2mLnkm -from openapi_client.models.aicusapicose_oar_xmrp_sns import AicusapicoseOArXMRpSNs +from openapi_client.models.aicusapico2ey_mrt6use_ql import Aicusapico2eyMRt6useQL +from openapi_client.models.aicusapico4_lpaf103_dgii import Aicusapico4LPAf103DGIi +from openapi_client.models.aicusapico4_lpaf103_dgii_data import Aicusapico4LPAf103DGIiData +from openapi_client.models.aicusapico5_ob_tetko9o_mo import Aicusapico5ObTetko9oMO +from openapi_client.models.aicusapico5_ob_tetko9o_mo_items_inner import Aicusapico5ObTetko9oMOItemsInner +from openapi_client.models.aicusapico_dpw375iu4xb1 import AicusapicoDPw375iu4xb1 +from openapi_client.models.aicusapico_h_wyv_bn_b1_qgg_i import AicusapicoHWyvBnB1QggI +from openapi_client.models.aicusapico_h_wyv_bn_b1_qgg_i_config import AicusapicoHWyvBnB1QggIConfig +from 
openapi_client.models.aicusapico_h_wyv_bn_b1_qgg_i_items_inner import AicusapicoHWyvBnB1QggIItemsInner +from openapi_client.models.aicusapico_k_utg5hw5_mq23 import AicusapicoKUtg5hw5MQ23 +from openapi_client.models.aicusapico_npq1_tceem_sd8 import AicusapicoNPq1TceemSd8 +from openapi_client.models.aicusapico_npq1_tceem_sd8_items_inner import AicusapicoNPq1TceemSd8ItemsInner +from openapi_client.models.aicusapico_ozq0ul_og1nr_k import AicusapicoOzq0ulOG1nrK +from openapi_client.models.aicusapico_ozq0ul_og1nr_k_input_payload import AicusapicoOzq0ulOG1nrKInputPayload +from openapi_client.models.aicusapico_ta_aup0_rj_hhq0 import AicusapicoTaAUp0RjHHQ0 +from openapi_client.models.aicusapico_ya9_vor_uqinz_f import AicusapicoYa9VOrUQINzF +from openapi_client.models.aicusapico_ya9_vor_uqinz_f_items_inner import AicusapicoYa9VOrUQINzFItemsInner +from openapi_client.models.aicusapico_ze_nx832z_hfgx import AicusapicoZeNx832zHfgx +from openapi_client.models.aicusapicoa_oeh_yyqx8ql_r import AicusapicoaOehYyqx8qlR +from openapi_client.models.aicusapicoa_oeh_yyqx8ql_r_index_ids import AicusapicoaOehYyqx8qlRIndexIds +from openapi_client.models.aicusapicod_betf4_zuz6_wh import AicusapicodBETf4Zuz6WH +from openapi_client.models.aicusapicoqh_vw_ter_avpqm import AicusapicoqhVwTerAVPQm +from openapi_client.models.aicusapicoqh_vw_ter_avpqm_items_inner import AicusapicoqhVwTerAVPQmItemsInner +from openapi_client.models.aicusapicoqh_vw_ter_avpqm_items_inner_qa_list_inner import AicusapicoqhVwTerAVPQmItemsInnerQAListInner +from openapi_client.models.aicusapicou6_vks_roj90h2 import Aicusapicou6VksROJ90h2 +from openapi_client.models.aicusapicou6_vks_roj90h2_index import Aicusapicou6VksROJ90h2Index +from openapi_client.models.aicusapicoyip3e_ubuk13_z import Aicusapicoyip3eUBUK13Z diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico51_raf_cay_oxi_z.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico2ey_mrt6use_ql.py similarity index 89% rename from 
api_test/biz_logic/rest_api/openapi_client/models/aicusapico51_raf_cay_oxi_z.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapico2ey_mrt6use_ql.py index 775605668..727e621d9 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico51_raf_cay_oxi_z.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico2ey_mrt6use_ql.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. @@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class Aicusapico51RafCAYOxiZ(BaseModel): +class Aicusapico2eyMRt6useQL(BaseModel): """ - Aicusapico51RafCAYOxiZ + Aicusapico2eyMRt6useQL """ # noqa: E501 content_type: StrictStr file_name: StrictStr @@ -48,7 +48,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of Aicusapico51RafCAYOxiZ from a JSON string""" + """Create an instance of Aicusapico2eyMRt6useQL from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -73,7 +73,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of Aicusapico51RafCAYOxiZ from a dict""" + """Create an instance of Aicusapico2eyMRt6useQL from a dict""" if obj is None: return None diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico4_lpaf103_dgii.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico4_lpaf103_dgii.py new file mode 100644 index 000000000..771142303 --- /dev/null +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico4_lpaf103_dgii.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + aics-api + + AI-Customer-Service - Core API + + The version of the OpenAPI document: 
2024-10-24T04:30:07Z + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from openapi_client.models.aicusapico4_lpaf103_dgii_data import Aicusapico4LPAf103DGIiData +from typing import Optional, Set +from typing_extensions import Self + +class Aicusapico4LPAf103DGIi(BaseModel): + """ + Aicusapico4LPAf103DGIi + """ # noqa: E501 + data: Optional[Aicusapico4LPAf103DGIiData] = None + message: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["data", "message"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Aicusapico4LPAf103DGIi from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of data + if self.data: + _dict['data'] = self.data.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Aicusapico4LPAf103DGIi from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "data": Aicusapico4LPAf103DGIiData.from_dict(obj["data"]) if obj.get("data") is not None else None, + "message": obj.get("message") + }) + return _obj + + diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_uy1_ybxi_wj5_aq.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico4_lpaf103_dgii_data.py similarity index 80% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapico_uy1_ybxi_wj5_aq.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapico4_lpaf103_dgii_data.py index 64952c6b2..0fa2d7889 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_uy1_ybxi_wj5_aq.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico4_lpaf103_dgii_data.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
@@ -22,15 +22,14 @@ from typing import Optional, Set from typing_extensions import Self -class AicusapicoUy1YBXiWJ5Aq(BaseModel): +class Aicusapico4LPAf103DGIiData(BaseModel): """ - AicusapicoUy1YBXiWJ5Aq + Aicusapico4LPAf103DGIiData """ # noqa: E501 - data: Optional[StrictStr] = None - message: Optional[StrictStr] = None s3_prefix: Optional[StrictStr] = Field(default=None, alias="s3Prefix") s3_bucket: Optional[StrictStr] = Field(default=None, alias="s3Bucket") - __properties: ClassVar[List[str]] = ["data", "message", "s3Prefix", "s3Bucket"] + url: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["s3Prefix", "s3Bucket", "url"] model_config = ConfigDict( populate_by_name=True, @@ -50,7 +49,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of AicusapicoUy1YBXiWJ5Aq from a JSON string""" + """Create an instance of Aicusapico4LPAf103DGIiData from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -75,7 +74,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of AicusapicoUy1YBXiWJ5Aq from a dict""" + """Create an instance of Aicusapico4LPAf103DGIiData from a dict""" if obj is None: return None @@ -83,10 +82,9 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: return cls.model_validate(obj) _obj = cls.model_validate({ - "data": obj.get("data"), - "message": obj.get("message"), "s3Prefix": obj.get("s3Prefix"), - "s3Bucket": obj.get("s3Bucket") + "s3Bucket": obj.get("s3Bucket"), + "url": obj.get("url") }) return _obj diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_qjco_kzz_zfi86.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico5_ob_tetko9o_mo.py similarity index 84% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapico_qjco_kzz_zfi86.py rename to 
api_test/biz_logic/rest_api/openapi_client/models/aicusapico5_ob_tetko9o_mo.py index d3933ef75..be8493bce 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_qjco_kzz_zfi86.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico5_ob_tetko9o_mo.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. @@ -19,15 +19,15 @@ from pydantic import BaseModel, ConfigDict, Field, StrictInt from typing import Any, ClassVar, Dict, List, Optional -from openapi_client.models.aicusapico_qjco_kzz_zfi86_items_inner import AicusapicoQjcoKzzZFI86ItemsInner +from openapi_client.models.aicusapico5_ob_tetko9o_mo_items_inner import Aicusapico5ObTetko9oMOItemsInner from typing import Optional, Set from typing_extensions import Self -class AicusapicoQjcoKzzZFI86(BaseModel): +class Aicusapico5ObTetko9oMO(BaseModel): """ - AicusapicoQjcoKzzZFI86 + Aicusapico5ObTetko9oMO """ # noqa: E501 - items: Optional[List[AicusapicoQjcoKzzZFI86ItemsInner]] = Field(default=None, alias="Items") + items: Optional[List[Aicusapico5ObTetko9oMOItemsInner]] = Field(default=None, alias="Items") count: Optional[StrictInt] = Field(default=None, alias="Count") __properties: ClassVar[List[str]] = ["Items", "Count"] @@ -49,7 +49,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of AicusapicoQjcoKzzZFI86 from a JSON string""" + """Create an instance of Aicusapico5ObTetko9oMO from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -81,7 +81,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of AicusapicoQjcoKzzZFI86 from a dict""" + """Create an instance of 
Aicusapico5ObTetko9oMO from a dict""" if obj is None: return None @@ -89,7 +89,7 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: return cls.model_validate(obj) _obj = cls.model_validate({ - "Items": [AicusapicoQjcoKzzZFI86ItemsInner.from_dict(_item) for _item in obj["Items"]] if obj.get("Items") is not None else None, + "Items": [Aicusapico5ObTetko9oMOItemsInner.from_dict(_item) for _item in obj["Items"]] if obj.get("Items") is not None else None, "Count": obj.get("Count") }) return _obj diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_qjco_kzz_zfi86_items_inner.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico5_ob_tetko9o_mo_items_inner.py similarity index 90% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapico_qjco_kzz_zfi86_items_inner.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapico5_ob_tetko9o_mo_items_inner.py index b531669c3..475c46f0f 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_qjco_kzz_zfi86_items_inner.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico5_ob_tetko9o_mo_items_inner.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
@@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class AicusapicoQjcoKzzZFI86ItemsInner(BaseModel): +class Aicusapico5ObTetko9oMOItemsInner(BaseModel): """ - AicusapicoQjcoKzzZFI86ItemsInner + Aicusapico5ObTetko9oMOItemsInner """ # noqa: E501 execution_id: StrictStr = Field(alias="executionId") s3_path: StrictStr = Field(alias="s3Path") @@ -52,7 +52,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of AicusapicoQjcoKzzZFI86ItemsInner from a JSON string""" + """Create an instance of Aicusapico5ObTetko9oMOItemsInner from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -77,7 +77,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of AicusapicoQjcoKzzZFI86ItemsInner from a dict""" + """Create an instance of Aicusapico5ObTetko9oMOItemsInner from a dict""" if obj is None: return None diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicor1_kt5_c2m_lnkm.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_dpw375iu4xb1.py similarity index 89% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapicor1_kt5_c2m_lnkm.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapico_dpw375iu4xb1.py index 505f3ef51..3fa79dd71 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicor1_kt5_c2m_lnkm.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_dpw375iu4xb1.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
@@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class Aicusapicor1Kt5C2mLnkm(BaseModel): +class AicusapicoDPw375iu4xb1(BaseModel): """ - Aicusapicor1Kt5C2mLnkm + AicusapicoDPw375iu4xb1 """ # noqa: E501 execution_id: List[StrictStr] = Field(alias="executionId") __properties: ClassVar[List[str]] = ["executionId"] @@ -47,7 +47,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of Aicusapicor1Kt5C2mLnkm from a JSON string""" + """Create an instance of AicusapicoDPw375iu4xb1 from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -72,7 +72,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of Aicusapicor1Kt5C2mLnkm from a dict""" + """Create an instance of AicusapicoDPw375iu4xb1 from a dict""" if obj is None: return None diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicob9jx_gq8zv1_as.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_h_wyv_bn_b1_qgg_i.py similarity index 80% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapicob9jx_gq8zv1_as.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapico_h_wyv_bn_b1_qgg_i.py index d8902eacd..9628d5581 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicob9jx_gq8zv1_as.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_h_wyv_bn_b1_qgg_i.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
@@ -19,17 +19,17 @@ from pydantic import BaseModel, ConfigDict, Field, StrictInt from typing import Any, ClassVar, Dict, List, Optional -from openapi_client.models.aicusapico2_twv_xbhs_tncy_config import Aicusapico2TwvXbhsTncyConfig -from openapi_client.models.aicusapicob9jx_gq8zv1_as_items_inner import Aicusapicob9jxGQ8zv1ASItemsInner +from openapi_client.models.aicusapico_h_wyv_bn_b1_qgg_i_config import AicusapicoHWyvBnB1QggIConfig +from openapi_client.models.aicusapico_h_wyv_bn_b1_qgg_i_items_inner import AicusapicoHWyvBnB1QggIItemsInner from typing import Optional, Set from typing_extensions import Self -class Aicusapicob9jxGQ8zv1AS(BaseModel): +class AicusapicoHWyvBnB1QggI(BaseModel): """ - Aicusapicob9jxGQ8zv1AS + AicusapicoHWyvBnB1QggI """ # noqa: E501 - config: Optional[Aicusapico2TwvXbhsTncyConfig] = Field(default=None, alias="Config") - items: Optional[List[Aicusapicob9jxGQ8zv1ASItemsInner]] = Field(default=None, alias="Items") + config: Optional[AicusapicoHWyvBnB1QggIConfig] = Field(default=None, alias="Config") + items: Optional[List[AicusapicoHWyvBnB1QggIItemsInner]] = Field(default=None, alias="Items") count: Optional[StrictInt] = Field(default=None, alias="Count") __properties: ClassVar[List[str]] = ["Config", "Items", "Count"] @@ -51,7 +51,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of Aicusapicob9jxGQ8zv1AS from a JSON string""" + """Create an instance of AicusapicoHWyvBnB1QggI from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -86,7 +86,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of Aicusapicob9jxGQ8zv1AS from a dict""" + """Create an instance of AicusapicoHWyvBnB1QggI from a dict""" if obj is None: return None @@ -94,8 +94,8 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: return 
cls.model_validate(obj) _obj = cls.model_validate({ - "Config": Aicusapico2TwvXbhsTncyConfig.from_dict(obj["Config"]) if obj.get("Config") is not None else None, - "Items": [Aicusapicob9jxGQ8zv1ASItemsInner.from_dict(_item) for _item in obj["Items"]] if obj.get("Items") is not None else None, + "Config": AicusapicoHWyvBnB1QggIConfig.from_dict(obj["Config"]) if obj.get("Config") is not None else None, + "Items": [AicusapicoHWyvBnB1QggIItemsInner.from_dict(_item) for _item in obj["Items"]] if obj.get("Items") is not None else None, "Count": obj.get("Count") }) return _obj diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico2_twv_xbhs_tncy_config.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_h_wyv_bn_b1_qgg_i_config.py similarity index 89% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapico2_twv_xbhs_tncy_config.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapico_h_wyv_bn_b1_qgg_i_config.py index aab1cae83..4d59a2526 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico2_twv_xbhs_tncy_config.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_h_wyv_bn_b1_qgg_i_config.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
@@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class Aicusapico2TwvXbhsTncyConfig(BaseModel): +class AicusapicoHWyvBnB1QggIConfig(BaseModel): """ - Aicusapico2TwvXbhsTncyConfig + AicusapicoHWyvBnB1QggIConfig """ # noqa: E501 page_size: Optional[StrictInt] = Field(default=None, alias="PageSize") max_items: Optional[StrictInt] = Field(default=None, alias="MaxItems") @@ -48,7 +48,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of Aicusapico2TwvXbhsTncyConfig from a JSON string""" + """Create an instance of AicusapicoHWyvBnB1QggIConfig from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -73,7 +73,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of Aicusapico2TwvXbhsTncyConfig from a dict""" + """Create an instance of AicusapicoHWyvBnB1QggIConfig from a dict""" if obj is None: return None diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicob9jx_gq8zv1_as_items_inner.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_h_wyv_bn_b1_qgg_i_items_inner.py similarity index 91% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapicob9jx_gq8zv1_as_items_inner.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapico_h_wyv_bn_b1_qgg_i_items_inner.py index a902314a3..d0be4f097 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicob9jx_gq8zv1_as_items_inner.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_h_wyv_bn_b1_qgg_i_items_inner.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
@@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class Aicusapicob9jxGQ8zv1ASItemsInner(BaseModel): +class AicusapicoHWyvBnB1QggIItemsInner(BaseModel): """ - Aicusapicob9jxGQ8zv1ASItemsInner + AicusapicoHWyvBnB1QggIItemsInner """ # noqa: E501 execution_id: StrictStr = Field(alias="executionId") file_name: StrictStr = Field(alias="fileName") @@ -56,7 +56,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of Aicusapicob9jxGQ8zv1ASItemsInner from a JSON string""" + """Create an instance of AicusapicoHWyvBnB1QggIItemsInner from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -81,7 +81,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of Aicusapicob9jxGQ8zv1ASItemsInner from a dict""" + """Create an instance of AicusapicoHWyvBnB1QggIItemsInner from a dict""" if obj is None: return None diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoh5w3_f_rwx_bjh_g.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_k_utg5hw5_mq23.py similarity index 90% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapicoh5w3_f_rwx_bjh_g.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapico_k_utg5hw5_mq23.py index ba6a208d9..1f515a472 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoh5w3_f_rwx_bjh_g.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_k_utg5hw5_mq23.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
@@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class Aicusapicoh5w3FRwxBjhG(BaseModel): +class AicusapicoKUtg5hw5MQ23(BaseModel): """ - Aicusapicoh5w3FRwxBjhG + AicusapicoKUtg5hw5MQ23 """ # noqa: E501 data: Optional[StrictStr] = None message: Optional[StrictStr] = None @@ -50,7 +50,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of Aicusapicoh5w3FRwxBjhG from a JSON string""" + """Create an instance of AicusapicoKUtg5hw5MQ23 from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -75,7 +75,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of Aicusapicoh5w3FRwxBjhG from a dict""" + """Create an instance of AicusapicoKUtg5hw5MQ23 from a dict""" if obj is None: return None diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico2_twv_xbhs_tncy.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_npq1_tceem_sd8.py similarity index 80% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapico2_twv_xbhs_tncy.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapico_npq1_tceem_sd8.py index 8e2e81c90..6713bc63a 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico2_twv_xbhs_tncy.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_npq1_tceem_sd8.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
@@ -19,17 +19,17 @@ from pydantic import BaseModel, ConfigDict, Field, StrictInt from typing import Any, ClassVar, Dict, List, Optional -from openapi_client.models.aicusapico2_twv_xbhs_tncy_config import Aicusapico2TwvXbhsTncyConfig -from openapi_client.models.aicusapico2_twv_xbhs_tncy_items_inner import Aicusapico2TwvXbhsTncyItemsInner +from openapi_client.models.aicusapico_h_wyv_bn_b1_qgg_i_config import AicusapicoHWyvBnB1QggIConfig +from openapi_client.models.aicusapico_npq1_tceem_sd8_items_inner import AicusapicoNPq1TceemSd8ItemsInner from typing import Optional, Set from typing_extensions import Self -class Aicusapico2TwvXbhsTncy(BaseModel): +class AicusapicoNPq1TceemSd8(BaseModel): """ - Aicusapico2TwvXbhsTncy + AicusapicoNPq1TceemSd8 """ # noqa: E501 - config: Optional[Aicusapico2TwvXbhsTncyConfig] = Field(default=None, alias="Config") - items: Optional[List[Aicusapico2TwvXbhsTncyItemsInner]] = Field(default=None, alias="Items") + config: Optional[AicusapicoHWyvBnB1QggIConfig] = Field(default=None, alias="Config") + items: Optional[List[AicusapicoNPq1TceemSd8ItemsInner]] = Field(default=None, alias="Items") count: Optional[StrictInt] = Field(default=None, alias="Count") __properties: ClassVar[List[str]] = ["Config", "Items", "Count"] @@ -51,7 +51,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of Aicusapico2TwvXbhsTncy from a JSON string""" + """Create an instance of AicusapicoNPq1TceemSd8 from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -86,7 +86,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of Aicusapico2TwvXbhsTncy from a dict""" + """Create an instance of AicusapicoNPq1TceemSd8 from a dict""" if obj is None: return None @@ -94,8 +94,8 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: return 
cls.model_validate(obj) _obj = cls.model_validate({ - "Config": Aicusapico2TwvXbhsTncyConfig.from_dict(obj["Config"]) if obj.get("Config") is not None else None, - "Items": [Aicusapico2TwvXbhsTncyItemsInner.from_dict(_item) for _item in obj["Items"]] if obj.get("Items") is not None else None, + "Config": AicusapicoHWyvBnB1QggIConfig.from_dict(obj["Config"]) if obj.get("Config") is not None else None, + "Items": [AicusapicoNPq1TceemSd8ItemsInner.from_dict(_item) for _item in obj["Items"]] if obj.get("Items") is not None else None, "Count": obj.get("Count") }) return _obj diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico2_twv_xbhs_tncy_items_inner.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_npq1_tceem_sd8_items_inner.py similarity index 93% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapico2_twv_xbhs_tncy_items_inner.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapico_npq1_tceem_sd8_items_inner.py index 53a4fb6d3..d9d473417 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico2_twv_xbhs_tncy_items_inner.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_npq1_tceem_sd8_items_inner.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
@@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class Aicusapico2TwvXbhsTncyItemsInner(BaseModel): +class AicusapicoNPq1TceemSd8ItemsInner(BaseModel): """ - Aicusapico2TwvXbhsTncyItemsInner + AicusapicoNPq1TceemSd8ItemsInner """ # noqa: E501 execution_status: StrictStr = Field(alias="executionStatus") s3_prefix: StrictStr = Field(alias="s3Prefix") @@ -61,7 +61,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of Aicusapico2TwvXbhsTncyItemsInner from a JSON string""" + """Create an instance of AicusapicoNPq1TceemSd8ItemsInner from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -86,7 +86,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of Aicusapico2TwvXbhsTncyItemsInner from a dict""" + """Create an instance of AicusapicoNPq1TceemSd8ItemsInner from a dict""" if obj is None: return None diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico4rw_mspze_boe5.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_ozq0ul_og1nr_k.py similarity index 84% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapico4rw_mspze_boe5.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapico_ozq0ul_og1nr_k.py index 375b8c5b8..fe37aa928 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico4rw_mspze_boe5.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_ozq0ul_og1nr_k.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
@@ -19,17 +19,17 @@ from pydantic import BaseModel, ConfigDict, StrictStr from typing import Any, ClassVar, Dict, List, Optional -from openapi_client.models.aicusapico4rw_mspze_boe5_input_payload import Aicusapico4rwMspzeBOe5InputPayload +from openapi_client.models.aicusapico_ozq0ul_og1nr_k_input_payload import AicusapicoOzq0ulOG1nrKInputPayload from typing import Optional, Set from typing_extensions import Self -class Aicusapico4rwMspzeBOe5(BaseModel): +class AicusapicoOzq0ulOG1nrK(BaseModel): """ - Aicusapico4rwMspzeBOe5 + AicusapicoOzq0ulOG1nrK """ # noqa: E501 result: Optional[StrictStr] = None execution_id: Optional[StrictStr] = None - input_payload: Optional[Aicusapico4rwMspzeBOe5InputPayload] = None + input_payload: Optional[AicusapicoOzq0ulOG1nrKInputPayload] = None __properties: ClassVar[List[str]] = ["result", "execution_id", "input_payload"] model_config = ConfigDict( @@ -50,7 +50,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of Aicusapico4rwMspzeBOe5 from a JSON string""" + """Create an instance of AicusapicoOzq0ulOG1nrK from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -78,7 +78,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of Aicusapico4rwMspzeBOe5 from a dict""" + """Create an instance of AicusapicoOzq0ulOG1nrK from a dict""" if obj is None: return None @@ -88,7 +88,7 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: _obj = cls.model_validate({ "result": obj.get("result"), "execution_id": obj.get("execution_id"), - "input_payload": Aicusapico4rwMspzeBOe5InputPayload.from_dict(obj["input_payload"]) if obj.get("input_payload") is not None else None + "input_payload": AicusapicoOzq0ulOG1nrKInputPayload.from_dict(obj["input_payload"]) if obj.get("input_payload") is not None else None }) return _obj diff 
--git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico4rw_mspze_boe5_input_payload.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_ozq0ul_og1nr_k_input_payload.py similarity index 91% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapico4rw_mspze_boe5_input_payload.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapico_ozq0ul_og1nr_k_input_payload.py index 960dbfddd..e73c596f4 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico4rw_mspze_boe5_input_payload.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_ozq0ul_og1nr_k_input_payload.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. @@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class Aicusapico4rwMspzeBOe5InputPayload(BaseModel): +class AicusapicoOzq0ulOG1nrKInputPayload(BaseModel): """ - Aicusapico4rwMspzeBOe5InputPayload + AicusapicoOzq0ulOG1nrKInputPayload """ # noqa: E501 chatbot_id: Optional[StrictStr] = Field(default=None, alias="chatbotId") group_name: Optional[StrictStr] = Field(default=None, alias="groupName") @@ -52,7 +52,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of Aicusapico4rwMspzeBOe5InputPayload from a JSON string""" + """Create an instance of AicusapicoOzq0ulOG1nrKInputPayload from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -77,7 +77,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of Aicusapico4rwMspzeBOe5InputPayload from a dict""" + """Create an instance of AicusapicoOzq0ulOG1nrKInputPayload from a 
dict""" if obj is None: return None diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicose_oar_xmrp_sns.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_ta_aup0_rj_hhq0.py similarity index 91% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapicose_oar_xmrp_sns.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapico_ta_aup0_rj_hhq0.py index e63f89f42..962976684 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicose_oar_xmrp_sns.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_ta_aup0_rj_hhq0.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. @@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class AicusapicoseOArXMRpSNs(BaseModel): +class AicusapicoTaAUp0RjHHQ0(BaseModel): """ - AicusapicoseOArXMRpSNs + AicusapicoTaAUp0RjHHQ0 """ # noqa: E501 offline: StrictStr chatbot_id: StrictStr = Field(alias="chatbotId") @@ -53,7 +53,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of AicusapicoseOArXMRpSNs from a JSON string""" + """Create an instance of AicusapicoTaAUp0RjHHQ0 from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -78,7 +78,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of AicusapicoseOArXMRpSNs from a dict""" + """Create an instance of AicusapicoTaAUp0RjHHQ0 from a dict""" if obj is None: return None diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoi_x_uam8_n8_dh8l.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_ya9_vor_uqinz_f.py 
similarity index 80% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapicoi_x_uam8_n8_dh8l.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapico_ya9_vor_uqinz_f.py index 0c15c437c..ba09044d7 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoi_x_uam8_n8_dh8l.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_ya9_vor_uqinz_f.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. @@ -19,18 +19,18 @@ from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr from typing import Any, ClassVar, Dict, List, Optional -from openapi_client.models.aicusapico2_twv_xbhs_tncy_config import Aicusapico2TwvXbhsTncyConfig -from openapi_client.models.aicusapicoi_x_uam8_n8_dh8l_items_inner import AicusapicoiXUam8N8Dh8lItemsInner +from openapi_client.models.aicusapico_h_wyv_bn_b1_qgg_i_config import AicusapicoHWyvBnB1QggIConfig +from openapi_client.models.aicusapico_ya9_vor_uqinz_f_items_inner import AicusapicoYa9VOrUQINzFItemsInner from typing import Optional, Set from typing_extensions import Self -class AicusapicoiXUam8N8Dh8l(BaseModel): +class AicusapicoYa9VOrUQINzF(BaseModel): """ - AicusapicoiXUam8N8Dh8l + AicusapicoYa9VOrUQINzF """ # noqa: E501 chatbot_ids: Optional[List[StrictStr]] = None - config: Optional[Aicusapico2TwvXbhsTncyConfig] = Field(default=None, alias="Config") - items: Optional[List[AicusapicoiXUam8N8Dh8lItemsInner]] = Field(default=None, alias="Items") + config: Optional[AicusapicoHWyvBnB1QggIConfig] = Field(default=None, alias="Config") + items: Optional[List[AicusapicoYa9VOrUQINzFItemsInner]] = Field(default=None, alias="Items") count: Optional[StrictInt] = Field(default=None, alias="Count") __properties: ClassVar[List[str]] = ["chatbot_ids", "Config", "Items", 
"Count"] @@ -52,7 +52,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of AicusapicoiXUam8N8Dh8l from a JSON string""" + """Create an instance of AicusapicoYa9VOrUQINzF from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -87,7 +87,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of AicusapicoiXUam8N8Dh8l from a dict""" + """Create an instance of AicusapicoYa9VOrUQINzF from a dict""" if obj is None: return None @@ -96,8 +96,8 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: _obj = cls.model_validate({ "chatbot_ids": obj.get("chatbot_ids"), - "Config": Aicusapico2TwvXbhsTncyConfig.from_dict(obj["Config"]) if obj.get("Config") is not None else None, - "Items": [AicusapicoiXUam8N8Dh8lItemsInner.from_dict(_item) for _item in obj["Items"]] if obj.get("Items") is not None else None, + "Config": AicusapicoHWyvBnB1QggIConfig.from_dict(obj["Config"]) if obj.get("Config") is not None else None, + "Items": [AicusapicoYa9VOrUQINzFItemsInner.from_dict(_item) for _item in obj["Items"]] if obj.get("Items") is not None else None, "Count": obj.get("Count") }) return _obj diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoi_x_uam8_n8_dh8l_items_inner.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_ya9_vor_uqinz_f_items_inner.py similarity index 90% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapicoi_x_uam8_n8_dh8l_items_inner.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapico_ya9_vor_uqinz_f_items_inner.py index 28529c908..13fe31a22 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoi_x_uam8_n8_dh8l_items_inner.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_ya9_vor_uqinz_f_items_inner.py @@ -5,7 +5,7 @@ 
AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. @@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class AicusapicoiXUam8N8Dh8lItemsInner(BaseModel): +class AicusapicoYa9VOrUQINzFItemsInner(BaseModel): """ - AicusapicoiXUam8N8Dh8lItemsInner + AicusapicoYa9VOrUQINzFItemsInner """ # noqa: E501 chatbot_id: StrictStr = Field(alias="ChatbotId") model_name: StrictStr = Field(alias="ModelName") @@ -50,7 +50,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of AicusapicoiXUam8N8Dh8lItemsInner from a JSON string""" + """Create an instance of AicusapicoYa9VOrUQINzFItemsInner from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -75,7 +75,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of AicusapicoiXUam8N8Dh8lItemsInner from a dict""" + """Create an instance of AicusapicoYa9VOrUQINzFItemsInner from a dict""" if obj is None: return None diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_eoc_l_nul8cwxa.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_ze_nx832z_hfgx.py similarity index 90% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapico_eoc_l_nul8cwxa.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapico_ze_nx832z_hfgx.py index 7dc1f75c0..45bdd7770 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_eoc_l_nul8cwxa.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_ze_nx832z_hfgx.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of 
the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. @@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class AicusapicoEOcLNul8cwxa(BaseModel): +class AicusapicoZeNx832zHfgx(BaseModel): """ - AicusapicoEOcLNul8cwxa + AicusapicoZeNx832zHfgx """ # noqa: E501 chatbot_id: StrictStr = Field(alias="chatbotId") index: StrictStr @@ -51,7 +51,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of AicusapicoEOcLNul8cwxa from a JSON string""" + """Create an instance of AicusapicoZeNx832zHfgx from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -76,7 +76,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of AicusapicoEOcLNul8cwxa from a dict""" + """Create an instance of AicusapicoZeNx832zHfgx from a dict""" if obj is None: return None diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoh_qb_fv37cvt_qs.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoa_oeh_yyqx8ql_r.py similarity index 85% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapicoh_qb_fv37cvt_qs.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapicoa_oeh_yyqx8ql_r.py index 18191678c..15ef5b657 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoh_qb_fv37cvt_qs.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoa_oeh_yyqx8ql_r.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
@@ -19,17 +19,17 @@ from pydantic import BaseModel, ConfigDict, Field, StrictStr from typing import Any, ClassVar, Dict, List, Optional -from openapi_client.models.aicusapicoh_qb_fv37cvt_qs_index_ids import AicusapicohQbFv37cvtQSIndexIds +from openapi_client.models.aicusapicoa_oeh_yyqx8ql_r_index_ids import AicusapicoaOehYyqx8qlRIndexIds from typing import Optional, Set from typing_extensions import Self -class AicusapicohQbFv37cvtQS(BaseModel): +class AicusapicoaOehYyqx8qlR(BaseModel): """ - AicusapicohQbFv37cvtQS + AicusapicoaOehYyqx8qlR """ # noqa: E501 chatbot_id: Optional[StrictStr] = Field(default=None, alias="chatbotId") group_name: Optional[StrictStr] = Field(default=None, alias="groupName") - index_ids: Optional[AicusapicohQbFv37cvtQSIndexIds] = Field(default=None, alias="indexIds") + index_ids: Optional[AicusapicoaOehYyqx8qlRIndexIds] = Field(default=None, alias="indexIds") message: Optional[StrictStr] = Field(default=None, alias="Message") __properties: ClassVar[List[str]] = ["chatbotId", "groupName", "indexIds", "Message"] @@ -51,7 +51,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of AicusapicohQbFv37cvtQS from a JSON string""" + """Create an instance of AicusapicoaOehYyqx8qlR from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -79,7 +79,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of AicusapicohQbFv37cvtQS from a dict""" + """Create an instance of AicusapicoaOehYyqx8qlR from a dict""" if obj is None: return None @@ -89,7 +89,7 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: _obj = cls.model_validate({ "chatbotId": obj.get("chatbotId"), "groupName": obj.get("groupName"), - "indexIds": AicusapicohQbFv37cvtQSIndexIds.from_dict(obj["indexIds"]) if obj.get("indexIds") is not None else None, + "indexIds": 
AicusapicoaOehYyqx8qlRIndexIds.from_dict(obj["indexIds"]) if obj.get("indexIds") is not None else None, "Message": obj.get("Message") }) return _obj diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoh_qb_fv37cvt_qs_index_ids.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoa_oeh_yyqx8ql_r_index_ids.py similarity index 89% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapicoh_qb_fv37cvt_qs_index_ids.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapicoa_oeh_yyqx8ql_r_index_ids.py index 0c82f4f98..2abe27d9e 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoh_qb_fv37cvt_qs_index_ids.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoa_oeh_yyqx8ql_r_index_ids.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
@@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class AicusapicohQbFv37cvtQSIndexIds(BaseModel): +class AicusapicoaOehYyqx8qlRIndexIds(BaseModel): """ - AicusapicohQbFv37cvtQSIndexIds + AicusapicoaOehYyqx8qlRIndexIds """ # noqa: E501 qq: Optional[StrictStr] = None qd: Optional[StrictStr] = None @@ -49,7 +49,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of AicusapicohQbFv37cvtQSIndexIds from a JSON string""" + """Create an instance of AicusapicoaOehYyqx8qlRIndexIds from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -74,7 +74,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of AicusapicohQbFv37cvtQSIndexIds from a dict""" + """Create an instance of AicusapicoaOehYyqx8qlRIndexIds from a dict""" if obj is None: return None diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_cyd129_m65y_kv.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicod_betf4_zuz6_wh.py similarity index 89% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapico_cyd129_m65y_kv.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapicod_betf4_zuz6_wh.py index 8acdc2c49..1ce779165 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico_cyd129_m65y_kv.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicod_betf4_zuz6_wh.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
@@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class AicusapicoCyd129M65yKV(BaseModel): +class AicusapicodBETf4Zuz6WH(BaseModel): """ - AicusapicoCyd129M65yKV + AicusapicodBETf4Zuz6WH """ # noqa: E501 content_type: StrictStr file_name: StrictStr @@ -48,7 +48,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of AicusapicoCyd129M65yKV from a JSON string""" + """Create an instance of AicusapicodBETf4Zuz6WH from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -73,7 +73,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of AicusapicoCyd129M65yKV from a dict""" + """Create an instance of AicusapicodBETf4Zuz6WH from a dict""" if obj is None: return None diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico35klz_y80ik_ph.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoqh_vw_ter_avpqm.py similarity index 84% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapico35klz_y80ik_ph.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapicoqh_vw_ter_avpqm.py index 3a1dc74c7..f400644a7 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico35klz_y80ik_ph.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoqh_vw_ter_avpqm.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. 
@@ -19,15 +19,15 @@ from pydantic import BaseModel, ConfigDict, Field, StrictInt from typing import Any, ClassVar, Dict, List, Optional -from openapi_client.models.aicusapico35klz_y80ik_ph_items_inner import Aicusapico35klzY80ikPhItemsInner +from openapi_client.models.aicusapicoqh_vw_ter_avpqm_items_inner import AicusapicoqhVwTerAVPQmItemsInner from typing import Optional, Set from typing_extensions import Self -class Aicusapico35klzY80ikPh(BaseModel): +class AicusapicoqhVwTerAVPQm(BaseModel): """ - Aicusapico35klzY80ikPh + AicusapicoqhVwTerAVPQm """ # noqa: E501 - items: Optional[List[Aicusapico35klzY80ikPhItemsInner]] = Field(default=None, alias="Items") + items: Optional[List[AicusapicoqhVwTerAVPQmItemsInner]] = Field(default=None, alias="Items") count: Optional[StrictInt] = Field(default=None, alias="Count") __properties: ClassVar[List[str]] = ["Items", "Count"] @@ -49,7 +49,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of Aicusapico35klzY80ikPh from a JSON string""" + """Create an instance of AicusapicoqhVwTerAVPQm from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -81,7 +81,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of Aicusapico35klzY80ikPh from a dict""" + """Create an instance of AicusapicoqhVwTerAVPQm from a dict""" if obj is None: return None @@ -89,7 +89,7 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: return cls.model_validate(obj) _obj = cls.model_validate({ - "Items": [Aicusapico35klzY80ikPhItemsInner.from_dict(_item) for _item in obj["Items"]] if obj.get("Items") is not None else None, + "Items": [AicusapicoqhVwTerAVPQmItemsInner.from_dict(_item) for _item in obj["Items"]] if obj.get("Items") is not None else None, "Count": obj.get("Count") }) return _obj diff --git 
a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico35klz_y80ik_ph_items_inner.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoqh_vw_ter_avpqm_items_inner.py similarity index 84% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapico35klz_y80ik_ph_items_inner.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapicoqh_vw_ter_avpqm_items_inner.py index c7d67e18a..6dbf9dfe8 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico35klz_y80ik_ph_items_inner.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoqh_vw_ter_avpqm_items_inner.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. @@ -19,18 +19,18 @@ from pydantic import BaseModel, ConfigDict, Field, StrictStr from typing import Any, ClassVar, Dict, List, Optional -from openapi_client.models.aicusapico35klz_y80ik_ph_items_inner_qa_list_inner import Aicusapico35klzY80ikPhItemsInnerQAListInner +from openapi_client.models.aicusapicoqh_vw_ter_avpqm_items_inner_qa_list_inner import AicusapicoqhVwTerAVPQmItemsInnerQAListInner from typing import Optional, Set from typing_extensions import Self -class Aicusapico35klzY80ikPhItemsInner(BaseModel): +class AicusapicoqhVwTerAVPQmItemsInner(BaseModel): """ - Aicusapico35klzY80ikPhItemsInner + AicusapicoqhVwTerAVPQmItemsInner """ # noqa: E501 s3_path: StrictStr = Field(alias="s3Path") create_time: StrictStr = Field(alias="createTime") s3_prefix: StrictStr = Field(alias="s3Prefix") - qa_list: Optional[List[Aicusapico35klzY80ikPhItemsInnerQAListInner]] = Field(default=None, alias="QAList") + qa_list: Optional[List[AicusapicoqhVwTerAVPQmItemsInnerQAListInner]] = Field(default=None, alias="QAList") status: StrictStr __properties: ClassVar[List[str]] = ["s3Path", "createTime", 
"s3Prefix", "QAList", "status"] @@ -52,7 +52,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of Aicusapico35klzY80ikPhItemsInner from a JSON string""" + """Create an instance of AicusapicoqhVwTerAVPQmItemsInner from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -84,7 +84,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of Aicusapico35klzY80ikPhItemsInner from a dict""" + """Create an instance of AicusapicoqhVwTerAVPQmItemsInner from a dict""" if obj is None: return None @@ -95,7 +95,7 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: "s3Path": obj.get("s3Path"), "createTime": obj.get("createTime"), "s3Prefix": obj.get("s3Prefix"), - "QAList": [Aicusapico35klzY80ikPhItemsInnerQAListInner.from_dict(_item) for _item in obj["QAList"]] if obj.get("QAList") is not None else None, + "QAList": [AicusapicoqhVwTerAVPQmItemsInnerQAListInner.from_dict(_item) for _item in obj["QAList"]] if obj.get("QAList") is not None else None, "status": obj.get("status") }) return _obj diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico35klz_y80ik_ph_items_inner_qa_list_inner.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoqh_vw_ter_avpqm_items_inner_qa_list_inner.py similarity index 89% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapico35klz_y80ik_ph_items_inner_qa_list_inner.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapicoqh_vw_ter_avpqm_items_inner_qa_list_inner.py index 0da9bf782..ed773499a 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapico35klz_y80ik_ph_items_inner_qa_list_inner.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoqh_vw_ter_avpqm_items_inner_qa_list_inner.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API 
- The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. @@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class Aicusapico35klzY80ikPhItemsInnerQAListInner(BaseModel): +class AicusapicoqhVwTerAVPQmItemsInnerQAListInner(BaseModel): """ - Aicusapico35klzY80ikPhItemsInnerQAListInner + AicusapicoqhVwTerAVPQmItemsInnerQAListInner """ # noqa: E501 question: Optional[StrictStr] = None kwargs: Optional[StrictStr] = None @@ -49,7 +49,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of Aicusapico35klzY80ikPhItemsInnerQAListInner from a JSON string""" + """Create an instance of AicusapicoqhVwTerAVPQmItemsInnerQAListInner from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -74,7 +74,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of Aicusapico35klzY80ikPhItemsInnerQAListInner from a dict""" + """Create an instance of AicusapicoqhVwTerAVPQmItemsInnerQAListInner from a dict""" if obj is None: return None diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicob_mn2p_lk9_av_e8.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicou6_vks_roj90h2.py similarity index 85% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapicob_mn2p_lk9_av_e8.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapicou6_vks_roj90h2.py index f6c7ab336..9a5802c75 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicob_mn2p_lk9_av_e8.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicou6_vks_roj90h2.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 
2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. @@ -19,18 +19,18 @@ from pydantic import BaseModel, ConfigDict, Field, StrictStr from typing import Any, ClassVar, Dict, List -from openapi_client.models.aicusapicob_mn2p_lk9_av_e8_index import AicusapicobMN2pLK9AvE8Index +from openapi_client.models.aicusapicou6_vks_roj90h2_index import Aicusapicou6VksROJ90h2Index from typing import Optional, Set from typing_extensions import Self -class AicusapicobMN2pLK9AvE8(BaseModel): +class Aicusapicou6VksROJ90h2(BaseModel): """ - AicusapicobMN2pLK9AvE8 + Aicusapicou6VksROJ90h2 """ # noqa: E501 model_name: StrictStr = Field(alias="modelName") chatbot_id: StrictStr = Field(alias="chatbotId") model_id: StrictStr = Field(alias="modelId") - index: AicusapicobMN2pLK9AvE8Index + index: Aicusapicou6VksROJ90h2Index __properties: ClassVar[List[str]] = ["modelName", "chatbotId", "modelId", "index"] model_config = ConfigDict( @@ -51,7 +51,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of AicusapicobMN2pLK9AvE8 from a JSON string""" + """Create an instance of Aicusapicou6VksROJ90h2 from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -79,7 +79,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of AicusapicobMN2pLK9AvE8 from a dict""" + """Create an instance of Aicusapicou6VksROJ90h2 from a dict""" if obj is None: return None @@ -90,7 +90,7 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: "modelName": obj.get("modelName"), "chatbotId": obj.get("chatbotId"), "modelId": obj.get("modelId"), - "index": AicusapicobMN2pLK9AvE8Index.from_dict(obj["index"]) if obj.get("index") is not None else None + "index": 
Aicusapicou6VksROJ90h2Index.from_dict(obj["index"]) if obj.get("index") is not None else None }) return _obj diff --git a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicob_mn2p_lk9_av_e8_index.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicou6_vks_roj90h2_index.py similarity index 89% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapicob_mn2p_lk9_av_e8_index.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapicou6_vks_roj90h2_index.py index a9d352cb1..cf059bca4 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicob_mn2p_lk9_av_e8_index.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicou6_vks_roj90h2_index.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. @@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class AicusapicobMN2pLK9AvE8Index(BaseModel): +class Aicusapicou6VksROJ90h2Index(BaseModel): """ - AicusapicobMN2pLK9AvE8Index + Aicusapicou6VksROJ90h2Index """ # noqa: E501 qq: StrictStr qd: StrictStr @@ -49,7 +49,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of AicusapicobMN2pLK9AvE8Index from a JSON string""" + """Create an instance of Aicusapicou6VksROJ90h2Index from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -74,7 +74,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of AicusapicobMN2pLK9AvE8Index from a dict""" + """Create an instance of Aicusapicou6VksROJ90h2Index from a dict""" if obj is None: return None diff --git 
a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoqew7t5v_ta2ak.py b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoyip3e_ubuk13_z.py similarity index 89% rename from api_test/biz_logic/rest_api/openapi_client/models/aicusapicoqew7t5v_ta2ak.py rename to api_test/biz_logic/rest_api/openapi_client/models/aicusapicoyip3e_ubuk13_z.py index 6c6b58728..45792148e 100644 --- a/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoqew7t5v_ta2ak.py +++ b/api_test/biz_logic/rest_api/openapi_client/models/aicusapicoyip3e_ubuk13_z.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. @@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class Aicusapicoqew7t5vTA2ak(BaseModel): +class Aicusapicoyip3eUBUK13Z(BaseModel): """ - Aicusapicoqew7t5vTA2ak + Aicusapicoyip3eUBUK13Z """ # noqa: E501 data: Optional[List[StrictStr]] = None message: Optional[StrictStr] = None @@ -48,7 +48,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of Aicusapicoqew7t5vTA2ak from a JSON string""" + """Create an instance of Aicusapicoyip3eUBUK13Z from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -73,7 +73,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of Aicusapicoqew7t5vTA2ak from a dict""" + """Create an instance of Aicusapicoyip3eUBUK13Z from a dict""" if obj is None: return None diff --git a/api_test/biz_logic/rest_api/openapi_client/rest.py b/api_test/biz_logic/rest_api/openapi_client/rest.py index 70ab0f033..2ed63f987 100644 --- a/api_test/biz_logic/rest_api/openapi_client/rest.py +++ 
b/api_test/biz_logic/rest_api/openapi_client/rest.py @@ -5,7 +5,7 @@ AI-Customer-Service - Core API - The version of the OpenAPI document: 2024-10-21T08:32:58Z + The version of the OpenAPI document: 2024-10-24T04:30:07Z Generated by OpenAPI Generator (https://openapi-generator.tech) Do not edit the class manually. diff --git a/api_test/sourceGen.sh b/api_test/sourceGen.sh index 22112ee9c..323973891 100755 --- a/api_test/sourceGen.sh +++ b/api_test/sourceGen.sh @@ -1,7 +1,7 @@ #!/bin/bash yes | rm -rf ./biz_logic/rest_api/* -mkdir generated-client +mkdir generated-client chmod a+w ./generated-client openapi-generator-cli generate -i Intelli-Agent-RESTful-API-prod-oas30.json -g python -o ./generated-client @@ -10,7 +10,12 @@ mv ./generated-client/docs ./biz_logic/rest_api/ mv ./generated-client/openapi_client ./biz_logic/rest_api/ touch ./biz_logic/rest_api/__init__.py -sed -i '/__version__ = "1.0.0"/a\import sys\nimport os\nopenapi_client_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../biz_logic/rest_api"))\nsys.path.insert(0, openapi_client_path)\n' ./biz_logic/rest_api/openapi_client/__init__.py +sed -i '' '/__version__ = "1.0.0"/a\ +import sys\ +import os\ +openapi_client_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../biz_logic/rest_api"))\ +sys.path.insert(0, openapi_client_path)\ +' ./biz_logic/rest_api/openapi_client/__init__.py rm -rf ./generated-client diff --git a/api_test/test_case/test_01_rest_document.py b/api_test/test_case/test_01_rest_document.py index ac5c4cae5..092ba5683 100644 --- a/api_test/test_case/test_01_rest_document.py +++ b/api_test/test_case/test_01_rest_document.py @@ -7,6 +7,7 @@ import requests import boto3 from api_test.biz_logic.rest_api import openapi_client +# from models import AicusapicoYSAGBFYarsoe from .utils import step logger = logging.getLogger(__name__) @@ -64,13 +65,14 @@ def teardown_method(self, method): def test_01_upload_document_pdf(self): '''test case''' - param = 
openapi_client.Aicusapico51RafCAYOxiZ(content_type='application/pdf', file_name="summary.pdf") + param = openapi_client.AicusapicodBETf4Zuz6WH(content_type='application/pdf', file_name="summary.pdf") + response = self.api_instance.knowledge_base_kb_presigned_url_post(param) - assert response.message==self.upload_success_msg and response.data.startswith(self.upload_prefix_data), "test_01_upload_document_pdf test failed" - self.__upload_file_to_s3(response.data, "./test_data/summary.pdf") - post_param = openapi_client.AicusapicoseOArXMRpSNs( - s3Bucket=response.s3_bucket, - s3Prefix=response.s3_prefix, + assert response.message==self.upload_success_msg, "test_01_upload_document_pdf test failed" + self.__upload_file_to_s3(response.data.url, "./test_data/summary.pdf") + post_param = openapi_client.AicusapicoTaAUp0RjHHQ0( + s3Bucket=response.data.s3_bucket, + s3Prefix=response.data.s3_prefix, indexType="qd", offline="true", chatbotId="admin", @@ -82,13 +84,13 @@ def test_01_upload_document_pdf(self): def test_02_upload_document_docx(self): '''test case''' - param = openapi_client.Intellapicormo5LBZXS9Rb(content_type='application/vnd.openxmlformats-officedocument.wordprocessingml.document', file_name="summary.docx") + param = openapi_client.AicusapicodBETf4Zuz6WH(content_type='application/vnd.openxmlformats-officedocument.wordprocessingml.document', file_name="summary.docx") response = self.api_instance.knowledge_base_kb_presigned_url_post(param) - assert response.message==self.upload_success_msg and response.data.startswith(self.upload_prefix_data), "test_02_upload_document_docx test failed" - self.__upload_file_to_s3(response.data, "./test_data/summary.docx") - post_param = openapi_client.IntellapicoNK9oLf1K1uex( - s3Bucket=response.s3_bucket, - s3Prefix=response.s3_prefix, + assert response.message==self.upload_success_msg, "test_02_upload_document_docx test failed" + self.__upload_file_to_s3(response.data.url, "./test_data/summary.docx") + post_param = 
openapi_client.AicusapicoTaAUp0RjHHQ0( + s3Bucket=response.data.s3_bucket, + s3Prefix=response.data.s3_prefix, indexType="qd", offline="true", chatbotId="admin", @@ -99,13 +101,13 @@ def test_02_upload_document_docx(self): def test_03_upload_document_csv(self): '''test case''' - param = openapi_client.Intellapicormo5LBZXS9Rb(content_type='text/csv', file_name="summary.csv") + param = openapi_client.AicusapicodBETf4Zuz6WH(content_type='text/csv', file_name="summary.csv") response = self.api_instance.knowledge_base_kb_presigned_url_post(param) - assert response.message==self.upload_success_msg and response.data.startswith(self.upload_prefix_data), "test_03_upload_document_csv test failed" - self.__upload_file_to_s3(response.data, "./test_data/summary.csv") - post_param = openapi_client.IntellapicoNK9oLf1K1uex( - s3Bucket=response.s3_bucket, - s3Prefix=response.s3_prefix, + assert response.message==self.upload_success_msg, "test_03_upload_document_csv test failed" + self.__upload_file_to_s3(response.data.url, "./test_data/summary.csv") + post_param = openapi_client.AicusapicoTaAUp0RjHHQ0( + s3Bucket=response.data.s3_bucket, + s3Prefix=response.data.s3_prefix, indexType="qd", offline="true", chatbotId="admin", @@ -116,13 +118,13 @@ def test_03_upload_document_csv(self): def test_04_upload_document_html(self): '''test case''' - param = openapi_client.Intellapicormo5LBZXS9Rb(content_type='text/html', file_name="summary.html") + param = openapi_client.AicusapicodBETf4Zuz6WH(content_type='text/html', file_name="summary.html") response = self.api_instance.knowledge_base_kb_presigned_url_post(param) - assert response.message==self.upload_success_msg and response.data.startswith(self.upload_prefix_data), "test_04_upload_document_html test failed" - self.__upload_file_to_s3(response.data, "./test_data/summary.html") - post_param = openapi_client.IntellapicoNK9oLf1K1uex( - s3Bucket=response.s3_bucket, - s3Prefix=response.s3_prefix, + assert 
response.message==self.upload_success_msg, "test_04_upload_document_html test failed" + self.__upload_file_to_s3(response.data.url, "./test_data/summary.html") + post_param = openapi_client.AicusapicoTaAUp0RjHHQ0( + s3Bucket=response.data.s3_bucket, + s3Prefix=response.data.s3_prefix, indexType="qd", offline="true", chatbotId="admin", @@ -133,13 +135,13 @@ def test_04_upload_document_html(self): def test_05_upload_document_jpeg(self): '''test case''' - param = openapi_client.Intellapicormo5LBZXS9Rb(content_type='image/jpeg', file_name="summary.jpeg") + param = openapi_client.AicusapicodBETf4Zuz6WH(content_type='image/jpeg', file_name="summary.jpeg") response = self.api_instance.knowledge_base_kb_presigned_url_post(param) - assert response.message==self.upload_success_msg and response.data.startswith(self.upload_prefix_data), "test_05_upload_document_jpeg test failed" - self.__upload_file_to_s3(response.data, "./test_data/summary.jpeg") - post_param = openapi_client.IntellapicoNK9oLf1K1uex( - s3Bucket=response.s3_bucket, - s3Prefix=response.s3_prefix, + assert response.message==self.upload_success_msg, "test_05_upload_document_jpeg test failed" + self.__upload_file_to_s3(response.data.url, "./test_data/summary.jpeg") + post_param = openapi_client.AicusapicoTaAUp0RjHHQ0( + s3Bucket=response.data.s3_bucket, + s3Prefix=response.data.s3_prefix, indexType="qd", offline="true", chatbotId="admin", @@ -150,13 +152,13 @@ def test_05_upload_document_jpeg(self): def test_06_upload_document_jpg(self): '''test case''' - param = openapi_client.Intellapicormo5LBZXS9Rb(content_type='image/jpeg', file_name="summary.jpg") + param = openapi_client.AicusapicodBETf4Zuz6WH(content_type='image/jpeg', file_name="summary.jpg") response = self.api_instance.knowledge_base_kb_presigned_url_post(param) - assert response.message==self.upload_success_msg and response.data.startswith(self.upload_prefix_data), "test_06_upload_document_jpg test failed" - self.__upload_file_to_s3(response.data, 
"./test_data/summary.jpg") - post_param = openapi_client.IntellapicoNK9oLf1K1uex( - s3Bucket=response.s3_bucket, - s3Prefix=response.s3_prefix, + assert response.message==self.upload_success_msg, "test_06_upload_document_jpg test failed" + self.__upload_file_to_s3(response.data.url, "./test_data/summary.jpg") + post_param = openapi_client.AicusapicoTaAUp0RjHHQ0( + s3Bucket=response.data.s3_bucket, + s3Prefix=response.data.s3_prefix, indexType="qd", offline="true", chatbotId="admin", @@ -167,13 +169,13 @@ def test_06_upload_document_jpg(self): def test_07_upload_document_png(self): '''test case''' - param = openapi_client.Intellapicormo5LBZXS9Rb(content_type='image/png', file_name="summary.png") + param = openapi_client.AicusapicodBETf4Zuz6WH(content_type='image/png', file_name="summary.png") response = self.api_instance.knowledge_base_kb_presigned_url_post(param) - assert response.message==self.upload_success_msg and response.data.startswith(self.upload_prefix_data), "test_07_upload_document_png test failed" - self.__upload_file_to_s3(response.data, "./test_data/summary.png") - post_param = openapi_client.IntellapicoNK9oLf1K1uex( - s3Bucket=response.s3_bucket, - s3Prefix=response.s3_prefix, + assert response.message==self.upload_success_msg, "test_07_upload_document_png test failed" + self.__upload_file_to_s3(response.data.url, "./test_data/summary.png") + post_param = openapi_client.AicusapicoTaAUp0RjHHQ0( + s3Bucket=response.data.s3_bucket, + s3Prefix=response.data.s3_prefix, indexType="qd", offline="true", chatbotId="admin", @@ -184,13 +186,13 @@ def test_07_upload_document_png(self): def test_08_upload_document_json(self): '''test case''' - param = openapi_client.Intellapicormo5LBZXS9Rb(content_type='application/json', file_name="summary.json") + param = openapi_client.AicusapicodBETf4Zuz6WH(content_type='application/json', file_name="summary.json") response = self.api_instance.knowledge_base_kb_presigned_url_post(param) - assert 
response.message==self.upload_success_msg and response.data.startswith(self.upload_prefix_data), "test_08_upload_document_json test failed" - self.__upload_file_to_s3(response.data, "./test_data/summary.json") - post_param = openapi_client.IntellapicoNK9oLf1K1uex( - s3Bucket=response.s3_bucket, - s3Prefix=response.s3_prefix, + assert response.message==self.upload_success_msg, "test_08_upload_document_json test failed" + self.__upload_file_to_s3(response.data.url, "./test_data/summary.json") + post_param = openapi_client.AicusapicoTaAUp0RjHHQ0( + s3Bucket=response.data.s3_bucket, + s3Prefix=response.data.s3_prefix, indexType="qd", offline="true", chatbotId="admin", @@ -201,13 +203,13 @@ def test_08_upload_document_json(self): def test_09_upload_document_md(self): '''test case''' - param = openapi_client.Intellapicormo5LBZXS9Rb(content_type='text/markdown', file_name="summary.md") + param = openapi_client.AicusapicodBETf4Zuz6WH(content_type='text/markdown', file_name="summary.md") response = self.api_instance.knowledge_base_kb_presigned_url_post(param) - assert response.message==self.upload_success_msg and response.data.startswith(self.upload_prefix_data), "test_09_upload_document_md test failed" - self.__upload_file_to_s3(response.data, "./test_data/summary.md") - post_param = openapi_client.IntellapicoNK9oLf1K1uex( - s3Bucket=response.s3_bucket, - s3Prefix=response.s3_prefix, + assert response.message==self.upload_success_msg, "test_09_upload_document_md test failed" + self.__upload_file_to_s3(response.data.url, "./test_data/summary.md") + post_param = openapi_client.AicusapicoTaAUp0RjHHQ0( + s3Bucket=response.data.s3_bucket, + s3Prefix=response.data.s3_prefix, indexType="qd", offline="true", chatbotId="admin", @@ -218,13 +220,13 @@ def test_09_upload_document_md(self): def test_10_upload_document_txt(self): '''test case''' - param = openapi_client.Intellapicormo5LBZXS9Rb(content_type='text/plain', file_name="summary.txt") + param = 
openapi_client.AicusapicodBETf4Zuz6WH(content_type='text/plain', file_name="summary.txt") response = self.api_instance.knowledge_base_kb_presigned_url_post(param) - assert response.message==self.upload_success_msg and response.data.startswith(self.upload_prefix_data), "test_10_upload_document_txt test failed" - self.__upload_file_to_s3(response.data, "./test_data/summary.txt") - post_param = openapi_client.IntellapicoNK9oLf1K1uex( - s3Bucket=response.s3_bucket, - s3Prefix=response.s3_prefix, + assert response.message==self.upload_success_msg, "test_10_upload_document_txt test failed" + self.__upload_file_to_s3(response.data.url, "./test_data/summary.txt") + post_param = openapi_client.AicusapicoTaAUp0RjHHQ0( + s3Bucket=response.data.s3_bucket, + s3Prefix=response.data.s3_prefix, indexType="qd", offline="true", chatbotId="admin", @@ -235,10 +237,10 @@ def test_10_upload_document_txt(self): # def test_11_upload_document_jsonl(self): # '''test case''' - # param = openapi_client.Intellapicormo5LBZXS9Rb(content_type='application/jsonlines', file_name="summary.jsonl") + # param = openapi_client.AicusapicodBETf4Zuz6WH(content_type='application/jsonlines', file_name="summary.jsonl") # response = self.api_instance.knowledge_base_kb_presigned_url_post(param) - # assert response.message==self.upload_success_msg and response.data.startswith(self.upload_prefix_data), "test_11_upload_document_jsonl test failed" - # self.__upload_file_to_s3(response.data, "./test_data/summary.jsonl") + # assert response.message==self.upload_success_msg, "test_11_upload_document_jsonl test failed" + # self.__upload_file_to_s3(response.data.url, "./test_data/summary.jsonl") def test_12_list_document(self): '''test case''' @@ -307,9 +309,9 @@ def test_21_exec_document_txt(self): def test_23_delete_document(self): '''test case''' - param = openapi_client.IntellapicoH4A9yvm8c1p3(executionId=list(self.exeIdDict.values())) + param = 
openapi_client.AicusapicoDPw375iu4xb1(executionId=list(self.exeIdDict.values())) response = self.api_instance.knowledge_base_executions_delete(param) - assert response.message=="The deletion has completed", "test_23_delete_document test failed" + assert response.message=="The deletion of specified documents has started", "test_23_delete_document test failed" # def test_24_upload_mismatch_document(self): # '''test case''' From 7a124c1bfc0dd651f5444e64475b248859389255 Mon Sep 17 00:00:00 2001 From: Xu Han Date: Thu, 24 Oct 2024 08:28:01 +0000 Subject: [PATCH 025/110] fix: fix npm run config --- source/infrastructure/cli/magic-config.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/source/infrastructure/cli/magic-config.ts b/source/infrastructure/cli/magic-config.ts index 9acb7d2df..31eb34be7 100644 --- a/source/infrastructure/cli/magic-config.ts +++ b/source/infrastructure/cli/magic-config.ts @@ -113,8 +113,9 @@ async function getAwsAccountAndRegion() { options.enableChat = config.chat.enabled; options.bedrockRegion = config.chat.bedrockRegion; options.enableConnect = config.chat.amazonConnect.enabled; - options.defaultEmbedding = (config.model.embeddingsModels ?? []) - .find((m: any) => m.default)?.name ?? embeddingModels[0].name; + options.defaultEmbedding = config.model.embeddingsModels && config.model.embeddingsModels.length > 0 + ? config.model.embeddingsModels[0].name + : embeddingModels[0].name; options.defaultLlm = config.model.llms.find((m) => m.provider === "bedrock")?.name; options.sagemakerModelS3Bucket = config.model.modelConfig.modelAssetsBucket; options.enableUI = config.ui.enabled; @@ -516,3 +517,4 @@ async function processCreateOptions(options: any): Promise { ? 
createConfig(config) : console.log("Skipping"); } + From 76b37ac678dc9ae0d00d6cce2490b932383606ec Mon Sep 17 00:00:00 2001 From: NingLyu Date: Fri, 25 Oct 2024 02:49:39 +0000 Subject: [PATCH 026/110] chore: refactor main --- source/lambda/online/lambda_main/main.py | 304 ++++++++++++----------- 1 file changed, 162 insertions(+), 142 deletions(-) diff --git a/source/lambda/online/lambda_main/main.py b/source/lambda/online/lambda_main/main.py index 2e4ae3f75..9b44a4853 100644 --- a/source/lambda/online/lambda_main/main.py +++ b/source/lambda/online/lambda_main/main.py @@ -65,54 +65,18 @@ def get_secret_value(secret_arn: str): raise Exception("Fail to retrieve the secret value") -def connect_case_event_handler(event_body: dict, context: dict, executor): - performed_by = event_body["detail"]["performedBy"]["iamPrincipalArn"] - logger.info(performed_by) - if "AWSServiceRoleForAmazonConnect" not in performed_by: - return None - - executor_body = compose_connect_body(event_body, context) +def create_ddb_history_obj(session_id: str, user_id: str, client_type: str) -> DynamoDBChatMessageHistory: + """Create a DynamoDBChatMessageHistory object - try: - executor_response: dict = executor(executor_body) - response_message = executor_response["message"]["content"] - logger.info(response_message) - logger.info("Add response message to case comment") - - related_item = event_body["detail"]["relatedItem"] - case_id = related_item["caseId"] - - response = connect_client.create_related_item( - caseId=case_id, - content={ - "comment": {"body": response_message, "contentType": "Text/Plain"}, - }, - domainId=connect_domain_id, - performedBy={ - "userArn": connect_user_arn, - }, - type="Comment", - ) - logger.info(response) - except Exception as e: - msg = traceback.format_exc() - logger.exception("Main exception:%s" % msg) - return "An exception has occurred, check CloudWatch log for more details" - - return {"status": "OK", "message": "Amazon Connect event has been processed"} - - 
-def aics_restapi_event_handler(event_body: dict, context: dict, entry_executor): - request_timestamp = context["request_timestamp"] - client_type = event_body.get("client_type", "default_client_type") - session_id = event_body.get("session_id", f"session_{request_timestamp}") - para_chatbot_config = event_body.get("chatbot_config", {}) - user_id = event_body.get("user_id", "default_user_id") - group_name = para_chatbot_config.get("group_name", "Admin") - chatbot_id = para_chatbot_config.get("chatbot_id", "admin") - use_history = para_chatbot_config.get("use_history", "true").lower() == "true" + Args: + session_id (str): The session id + user_id (str): The user id + client_type (str): The client type - ddb_history_obj = DynamoDBChatMessageHistory( + Returns: + DynamoDBChatMessageHistory: The DynamoDBChatMessageHistory object + """ + return DynamoDBChatMessageHistory( sessions_table_name=sessions_table_name, messages_table_name=messages_table_name, session_id=session_id, @@ -120,44 +84,18 @@ def aics_restapi_event_handler(event_body: dict, context: dict, entry_executor): client_type=client_type, ) - chat_history = ddb_history_obj.messages_as_langchain - - standard_event_body = { - "query": event_body["query"], - "entry_type": EntryType.COMMON, - "session_id": session_id, - "user_id": user_id, - "chatbot_config": { - "chatbot_mode": "agent", - "use_history": use_history, - }, - "stream": False, - } - - standard_event_body["chat_history"] = chat_history - standard_event_body["ddb_history_obj"] = ddb_history_obj - standard_event_body["request_timestamp"] = request_timestamp - standard_event_body["chatbot_config"]["user_id"] = user_id - standard_event_body["chatbot_config"]["group_name"] = group_name - standard_event_body["chatbot_config"]["chatbot_id"] = chatbot_id - standard_event_body["message_id"] = str(uuid.uuid4()) - standard_event_body["custom_message_id"] = "" - standard_event_body["ws_connection_id"] = "" - - standard_response = 
entry_executor(standard_event_body) - aics_response = { - "role": standard_response["message"]["role"], - "content": standard_response["message"]["content"], - "category": standard_response.get("current_agent_intent_type", ""), - "intent_id": "i0", - "intent_completed": "true", - } - - return aics_response +def compose_connect_body(event_body: dict, context: dict): + """ + Compose the body for the Amazon Connect API request based on the event and context. + Args: + event_body (dict): The event body received from the Lambda function. + context (dict): The context object passed to the Lambda function. -def compose_connect_body(event_body: dict, context: dict): + Returns: + dict: The composed body for the Amazon Connect API request. + """ request_timestamp = context["request_timestamp"] chatbot_id = os.environ.get("CONNECT_BOT_ID", "admin") related_item = event_body["detail"]["relatedItem"] @@ -237,74 +175,156 @@ def compose_connect_body(event_body: dict, context: dict): return agent_flow_body +def assemble_event_body(event_body: dict, context: dict): + """ + Assembles the event body for processing based on the provided event body and context. + + This function takes the event body and context as input, extracts relevant information, and constructs a new event body that includes the client type, session ID, user ID, message ID, group name, and chatbot ID. The session ID is generated based on the request timestamp if not provided in the event body. + + Parameters: + event_body (dict): The original event body received by the lambda function. + context (dict): The context object provided by the lambda function, containing information such as the request timestamp. + + Returns: + dict: The assembled event body with the extracted and generated information. 
+ """ + body = {} + request_timestamp = context["request_timestamp"] + body["client_type"] = event_body.get("client_type", "default_client_type") + body["session_id"] = event_body.get( + "session_id", f"session_{int(request_timestamp)}") + body["user_id"] = event_body.get("user_id", "default_user_id") + body["message_id"] = event_body.get("custom_message_id", str(uuid.uuid4())) + body["group_name"] = event_body.get( + "chatbot_config", {}).get("group_name", "Admin") + body["chatbot_id"] = event_body.get( + "chatbot_config", {}).get("chatbot_id", "admin") + + return body + + +def connect_case_event_handler(event_body: dict, context: dict, executor): + performed_by = event_body["detail"]["performedBy"]["iamPrincipalArn"] + logger.info(performed_by) + if "AWSServiceRoleForAmazonConnect" not in performed_by: + return None + + executor_body = compose_connect_body(event_body, context) + + try: + executor_response: dict = executor(executor_body) + response_message = executor_response["message"]["content"] + logger.info(response_message) + logger.info("Add response message to case comment") + + related_item = event_body["detail"]["relatedItem"] + case_id = related_item["caseId"] + + response = connect_client.create_related_item( + caseId=case_id, + content={ + "comment": {"body": response_message, "contentType": "Text/Plain"}, + }, + domainId=connect_domain_id, + performedBy={ + "userArn": connect_user_arn, + }, + type="Comment", + ) + logger.info(response) + except Exception as e: + msg = traceback.format_exc() + logger.exception("Main exception:%s" % msg) + return "An exception has occurred, check CloudWatch log for more details" + + return {"status": "OK", "message": "Amazon Connect event has been processed"} + + +def aics_restapi_event_handler(event_body: dict, context: dict, entry_executor): + + use_history = event_body.get("chatbot_config", {}).get( + "use_history", "true").lower() == "true" + + ddb_history_obj = create_ddb_history_obj(session_id, user_id, 
client_type) + chat_history = ddb_history_obj.messages_as_langchain + + standard_event_body = { + "query": event_body["query"], + "entry_type": EntryType.COMMON, + "session_id": session_id, + "user_id": user_id, + "chatbot_config": { + "chatbot_mode": "agent", + "use_history": use_history, + }, + "stream": False, + } + + standard_event_body["chat_history"] = chat_history + standard_event_body["ddb_history_obj"] = ddb_history_obj + standard_event_body["request_timestamp"] = request_timestamp + standard_event_body["chatbot_config"]["user_id"] = user_id + standard_event_body["chatbot_config"]["group_name"] = group_name + standard_event_body["chatbot_config"]["chatbot_id"] = chatbot_id + standard_event_body["message_id"] = + standard_event_body["custom_message_id"] = "" + standard_event_body["ws_connection_id"] = "" + + standard_response = entry_executor(standard_event_body) + + aics_response = { + "role": standard_response["message"]["role"], + "content": standard_response["message"]["content"], + "category": standard_response.get("current_agent_intent_type", ""), + "intent_id": "i0", + "intent_completed": "true", + } + + return aics_response + + +def default_event_handler(event_body: dict, context: dict, entry_executor): + ws_connection_id = context.get("ws_connection_id") + + load_ws_client(websocket_url) + + ddb_history_obj = create_ddb_history_obj(session_id, user_id, client_type) + chat_history = ddb_history_obj.messages_as_langchain + + event_body["stream"] = context["stream"] + event_body["chat_history"] = chat_history + event_body["ws_connection_id"] = ws_connection_id + event_body["custom_message_id"] = message_id + event_body["message_id"] = message_id + event_body["ddb_history_obj"] = ddb_history_obj + event_body["request_timestamp"] = request_timestamp + event_body["chatbot_config"]["user_id"] = user_id + event_body["chatbot_config"]["group_name"] = group_name + event_body["chatbot_config"]["chatbot_id"] = chatbot_id + event_body["kb_enabled"] = kb_enabled 
+ event_body["kb_type"] = kb_type + + # show debug info directly in local mode + if is_running_local(): + response: dict = entry_executor(event_body) + return response + else: + response: dict = entry_executor(event_body) + return response + + @chatbot_lambda_call_wrapper def lambda_handler(event_body: dict, context: dict): - logger.info(f"raw event_body: {event_body}") + logger.info(f"Raw event_body: {event_body}") entry_type = event_body.get("entry_type", EntryType.COMMON).lower() entry_executor = get_entry(entry_type) stream = context["stream"] if event_body.get("source", "") == "aws.cases": - # Connect case event + # Amazon Connect case event return connect_case_event_handler(event_body, context, entry_executor) elif not stream: + # Restful API return aics_restapi_event_handler(event_body, context, entry_executor) else: - - ws_connection_id = context.get("ws_connection_id") - request_timestamp = context["request_timestamp"] - load_ws_client(websocket_url) - - client_type = event_body.get("client_type", "default_client_type") - session_id = event_body.get("session_id", f"session_{int(request_timestamp)}") - message_id = event_body.get("custom_message_id", str(uuid.uuid4())) - user_id = event_body.get("user_id", "default_user_id") - # TODO Need to modify key - group_name = event_body.get("chatbot_config", {}).get("group_name", "Admin") - chatbot_id = event_body.get("chatbot_config", {}).get("chatbot_id", "admin") - - ddb_history_obj = DynamoDBChatMessageHistory( - sessions_table_name=sessions_table_name, - messages_table_name=messages_table_name, - session_id=session_id, - user_id=user_id, - client_type=client_type, - ) - - chat_history = ddb_history_obj.messages_as_langchain - - event_body["stream"] = stream - event_body["chat_history"] = chat_history - event_body["ws_connection_id"] = ws_connection_id - event_body["custom_message_id"] = message_id - event_body["message_id"] = message_id - event_body["ddb_history_obj"] = ddb_history_obj - 
event_body["request_timestamp"] = request_timestamp - event_body["chatbot_config"]["user_id"] = user_id - event_body["chatbot_config"]["group_name"] = group_name - event_body["chatbot_config"]["chatbot_id"] = chatbot_id - event_body["kb_enabled"] = kb_enabled - event_body["kb_type"] = kb_type - # TODO: chatbot id add to event body - - # logger.info(f"event_body:\n{json.dumps(event_body,ensure_ascii=False,indent=2,cls=JSONEncoder)}") - # debuging - # show debug info directly in local mode - if is_running_local(): - response: dict = entry_executor(event_body) - return response - # r = process_response(event_body,response) - # if not stream: - # return r - # return "All records have been processed" - # return r - else: - try: - response: dict = entry_executor(event_body) - # r = process_response(event_body,response) - if not stream: - return response - return "All records have been processed" - except Exception as e: - msg = traceback.format_exc() - logger.exception("Main exception:%s" % msg) - return "An exception has occurred" + # WebSocket API + return default_event_handler(event_body, context, entry_executor) From 7fff338e324e0ed499f1e75bf430c8a25d88562f Mon Sep 17 00:00:00 2001 From: NingLyu Date: Fri, 25 Oct 2024 03:15:42 +0000 Subject: [PATCH 027/110] chore: enhancement --- source/lambda/online/lambda_main/main.py | 50 ++++++++++++++++-------- 1 file changed, 33 insertions(+), 17 deletions(-) diff --git a/source/lambda/online/lambda_main/main.py b/source/lambda/online/lambda_main/main.py index 9b44a4853..15f352cc4 100644 --- a/source/lambda/online/lambda_main/main.py +++ b/source/lambda/online/lambda_main/main.py @@ -190,6 +190,7 @@ def assemble_event_body(event_body: dict, context: dict): """ body = {} request_timestamp = context["request_timestamp"] + body["request_timestamp"] = request_timestamp body["client_type"] = event_body.get("client_type", "default_client_type") body["session_id"] = event_body.get( "session_id", f"session_{int(request_timestamp)}") @@ 
-204,6 +205,19 @@ def assemble_event_body(event_body: dict, context: dict): def connect_case_event_handler(event_body: dict, context: dict, executor): + """ + Handles the event processing for Amazon Connect cases. + + This function processes events related to Amazon Connect cases, specifically handling the creation of case comments. It extracts relevant information from the event body and context, checks the performedBy IAM principal ARN to ensure it's an Amazon Connect service role, and then composes an executor body for further processing. If the check passes, it attempts to execute the executor with the composed body, logs the response message, and adds the response message as a comment to the related case. + + Parameters: + event_body (dict): The event body received by the lambda function, containing details about the Amazon Connect case event. + context (dict): The context object provided by the lambda function, containing information such as the request timestamp. + executor (function): A function that executes the processing of the event, taking the executor body as input. + + Returns: + dict or str: Returns a dictionary with a status and message indicating the outcome of the processing, or a string indicating an exception has occurred. 
+ """ performed_by = event_body["detail"]["performedBy"]["iamPrincipalArn"] logger.info(performed_by) if "AWSServiceRoleForAmazonConnect" not in performed_by: @@ -241,18 +255,19 @@ def connect_case_event_handler(event_body: dict, context: dict, executor): def aics_restapi_event_handler(event_body: dict, context: dict, entry_executor): - + assembled_body = assemble_event_body(event_body, context) use_history = event_body.get("chatbot_config", {}).get( "use_history", "true").lower() == "true" - ddb_history_obj = create_ddb_history_obj(session_id, user_id, client_type) + ddb_history_obj = create_ddb_history_obj( + assembled_body["session_id"], assembled_body["user_id"], assembled_body["client_type"]) chat_history = ddb_history_obj.messages_as_langchain standard_event_body = { "query": event_body["query"], "entry_type": EntryType.COMMON, - "session_id": session_id, - "user_id": user_id, + "session_id": assembled_body["session_id"], + "user_id": assembled_body["user_id"], "chatbot_config": { "chatbot_mode": "agent", "use_history": use_history, @@ -262,11 +277,11 @@ def aics_restapi_event_handler(event_body: dict, context: dict, entry_executor): standard_event_body["chat_history"] = chat_history standard_event_body["ddb_history_obj"] = ddb_history_obj - standard_event_body["request_timestamp"] = request_timestamp - standard_event_body["chatbot_config"]["user_id"] = user_id - standard_event_body["chatbot_config"]["group_name"] = group_name - standard_event_body["chatbot_config"]["chatbot_id"] = chatbot_id - standard_event_body["message_id"] = + standard_event_body["request_timestamp"] = assembled_body["request_timestamp"] + standard_event_body["chatbot_config"]["user_id"] = assembled_body["user_id"] + standard_event_body["chatbot_config"]["group_name"] = assembled_body["group_name"] + standard_event_body["chatbot_config"]["chatbot_id"] = assembled_body["chatbot_id"] + standard_event_body["message_id"] = assembled_body["message_id"] standard_event_body["custom_message_id"] 
= "" standard_event_body["ws_connection_id"] = "" @@ -285,22 +300,23 @@ def aics_restapi_event_handler(event_body: dict, context: dict, entry_executor): def default_event_handler(event_body: dict, context: dict, entry_executor): ws_connection_id = context.get("ws_connection_id") - + assembled_body = assemble_event_body(event_body, context) load_ws_client(websocket_url) - ddb_history_obj = create_ddb_history_obj(session_id, user_id, client_type) + ddb_history_obj = create_ddb_history_obj( + assembled_body["session_id"], assembled_body["user_id"], assembled_body["client_type"]) chat_history = ddb_history_obj.messages_as_langchain event_body["stream"] = context["stream"] event_body["chat_history"] = chat_history event_body["ws_connection_id"] = ws_connection_id - event_body["custom_message_id"] = message_id - event_body["message_id"] = message_id + event_body["custom_message_id"] = assembled_body["message_id"] + event_body["message_id"] = assembled_body["message_id"] event_body["ddb_history_obj"] = ddb_history_obj - event_body["request_timestamp"] = request_timestamp - event_body["chatbot_config"]["user_id"] = user_id - event_body["chatbot_config"]["group_name"] = group_name - event_body["chatbot_config"]["chatbot_id"] = chatbot_id + event_body["request_timestamp"] = assembled_body["request_timestamp"] + event_body["chatbot_config"]["user_id"] = assembled_body["user_id"] + event_body["chatbot_config"]["group_name"] = assembled_body["group_name"] + event_body["chatbot_config"]["chatbot_id"] = assembled_body["chatbot_id"] event_body["kb_enabled"] = kb_enabled event_body["kb_type"] = kb_type From b7f98bfded9562862d30ff5eeffd24b05082a9f8 Mon Sep 17 00:00:00 2001 From: NingLyu Date: Fri, 25 Oct 2024 09:01:56 +0000 Subject: [PATCH 028/110] fix: update parameters value --- .../common_logic/common_utils/constant.py | 6 + source/lambda/online/lambda_main/main.py | 113 +++++++++++------- 2 files changed, 77 insertions(+), 42 deletions(-) diff --git 
a/source/lambda/online/common_logic/common_utils/constant.py b/source/lambda/online/common_logic/common_utils/constant.py index 518d35daf..6872a9a95 100644 --- a/source/lambda/online/common_logic/common_utils/constant.py +++ b/source/lambda/online/common_logic/common_utils/constant.py @@ -171,3 +171,9 @@ class IndexTag(Enum): @unique class KBType(Enum): AOS = "aos" + + +class ExecutionType(Enum): + AMAZON_CONNECT = "amazon_connect" + RESTFUL_API = "restful_api" + WEBSOCKET_API = "websocket_api" diff --git a/source/lambda/online/lambda_main/main.py b/source/lambda/online/lambda_main/main.py index 15f352cc4..f8dd1d83a 100644 --- a/source/lambda/online/lambda_main/main.py +++ b/source/lambda/online/lambda_main/main.py @@ -5,14 +5,13 @@ import boto3 from botocore.exceptions import ClientError -from common_logic.common_utils.constant import EntryType +from common_logic.common_utils.constant import EntryType, ExecutionType from common_logic.common_utils.ddb_utils import DynamoDBChatMessageHistory from common_logic.common_utils.lambda_invoke_utils import ( chatbot_lambda_call_wrapper, is_running_local, ) from common_logic.common_utils.logger_utils import get_logger -from common_logic.common_utils.response_utils import process_response from common_logic.common_utils.websocket_utils import load_ws_client from lambda_main.main_utils.online_entries import get_entry @@ -224,40 +223,47 @@ def connect_case_event_handler(event_body: dict, context: dict, executor): return None executor_body = compose_connect_body(event_body, context) + executor_response: dict = executor(executor_body) + response_message = executor_response["message"]["content"] + logger.info(response_message) + logger.info("Add response message to case comment") - try: - executor_response: dict = executor(executor_body) - response_message = executor_response["message"]["content"] - logger.info(response_message) - logger.info("Add response message to case comment") - - related_item = 
event_body["detail"]["relatedItem"] - case_id = related_item["caseId"] - - response = connect_client.create_related_item( - caseId=case_id, - content={ - "comment": {"body": response_message, "contentType": "Text/Plain"}, - }, - domainId=connect_domain_id, - performedBy={ - "userArn": connect_user_arn, - }, - type="Comment", - ) - logger.info(response) - except Exception as e: - msg = traceback.format_exc() - logger.exception("Main exception:%s" % msg) - return "An exception has occurred, check CloudWatch log for more details" + related_item = event_body["detail"]["relatedItem"] + case_id = related_item["caseId"] + + response = connect_client.create_related_item( + caseId=case_id, + content={ + "comment": {"body": response_message, "contentType": "Text/Plain"}, + }, + domainId=connect_domain_id, + performedBy={ + "userArn": connect_user_arn, + }, + type="Comment", + ) + logger.info(response) return {"status": "OK", "message": "Amazon Connect event has been processed"} -def aics_restapi_event_handler(event_body: dict, context: dict, entry_executor): +def restapi_event_handler(event_body: dict, context: dict, entry_executor): + """ + Handles the event processing for Restful API requests. + + This function processes events related to Restful API requests, specifically handling the assembly of the event body for further processing. It extracts relevant information from the event body and context, checks the use of history, and then composes a standard event body for further processing. It attempts to execute the entry executor with the composed body, logs the response message, and returns the response. + + Parameters: + event_body (dict): The event body received by the lambda function, containing details about the Restful API request. + context (dict): The context object provided by the lambda function, containing information such as the request timestamp. 
+ entry_executor (function): A function that executes the processing of the event, taking the standard event body as input. + + Returns: + dict: Returns a dictionary with the response from the entry executor, including the role, content, category, intent_id, and intent_completed. + """ assembled_body = assemble_event_body(event_body, context) - use_history = event_body.get("chatbot_config", {}).get( - "use_history", "true").lower() == "true" + use_history = str(event_body.get("chatbot_config", {}).get( + "use_history", "true")).lower() == "true" ddb_history_obj = create_ddb_history_obj( assembled_body["session_id"], assembled_body["user_id"], assembled_body["client_type"]) @@ -299,6 +305,19 @@ def aics_restapi_event_handler(event_body: dict, context: dict, entry_executor): def default_event_handler(event_body: dict, context: dict, entry_executor): + """ + Handles the default event (WebSocket API) processing for the lambda function. + + This function is responsible for processing events that do not require special handling, such as those from the WebSocket API. It assembles the event body, loads the WebSocket client, and prepares the DynamoDB history object and chat history for processing. The event body is then passed to the entry executor for further processing. + + Args: + event_body (dict): The event body received from the Lambda function. + context (dict): The context object passed to the Lambda function. + entry_executor (function): A function that executes the processing of the event, taking the standard event body as input. + + Returns: + dict: Returns a dictionary with the response from the entry executor. 
+ """ ws_connection_id = context.get("ws_connection_id") assembled_body = assemble_event_body(event_body, context) load_ws_client(websocket_url) @@ -320,7 +339,7 @@ def default_event_handler(event_body: dict, context: dict, entry_executor): event_body["kb_enabled"] = kb_enabled event_body["kb_type"] = kb_type - # show debug info directly in local mode + # Show debug info directly in local mode if is_running_local(): response: dict = entry_executor(event_body) return response @@ -332,15 +351,25 @@ def default_event_handler(event_body: dict, context: dict, entry_executor): @chatbot_lambda_call_wrapper def lambda_handler(event_body: dict, context: dict): logger.info(f"Raw event_body: {event_body}") + run_type = ExecutionType.RESTFUL_API entry_type = event_body.get("entry_type", EntryType.COMMON).lower() - entry_executor = get_entry(entry_type) - stream = context["stream"] - if event_body.get("source", "") == "aws.cases": - # Amazon Connect case event - return connect_case_event_handler(event_body, context, entry_executor) - elif not stream: - # Restful API - return aics_restapi_event_handler(event_body, context, entry_executor) - else: - # WebSocket API - return default_event_handler(event_body, context, entry_executor) + try: + entry_executor = get_entry(entry_type) + stream = context["stream"] + if event_body.get("source", "") == "aws.cases": + # Amazon Connect case event + run_type = ExecutionType.AMAZON_CONNECT + return connect_case_event_handler(event_body, context, entry_executor) + elif not stream: + # Restful API + run_type = ExecutionType.RESTFUL_API + return restapi_event_handler(event_body, context, entry_executor) + else: + # WebSocket API + run_type = ExecutionType.WEBSOCKET_API + return default_event_handler(event_body, context, entry_executor) + except Exception as e: + if ExecutionType.WEBSOCKET_API == run_type: + pass + logger.error(f"An error occurred: {e}") + return {"error": str(e)} From a457316166562845e14be903d91710543e7d78e8 Mon Sep 17 00:00:00 
2001 From: Cui <530051970@qq.com> Date: Sun, 27 Oct 2024 19:02:53 +0800 Subject: [PATCH 029/110] update shell --- api_test/buildspec-20241012.yaml | 15 +++++++++------ source/script/build.sh | 16 ++++++++++++++++ 2 files changed, 25 insertions(+), 6 deletions(-) diff --git a/api_test/buildspec-20241012.yaml b/api_test/buildspec-20241012.yaml index 82a6497ff..9a4f83716 100644 --- a/api_test/buildspec-20241012.yaml +++ b/api_test/buildspec-20241012.yaml @@ -58,12 +58,6 @@ phases: echo "$install_start_time install start..." echo "----------------------------------------------------------------" git clone $REPOSITORY_URL --branch $CODE_BRANCH --single-branch - echo "build portal assets >>>" - pushd Intelli-Agent/source/portal - npm i -g pnpm - pnpm i - npm run build - popd deploy_start_time=$(date +"%Y-%m-%d_%H-%M-%S") echo "----------------------------------------------------------------" echo "$deploy_start_time deploy start..." @@ -73,7 +67,16 @@ phases: echo "Synthesizing start..." pushd "Intelli-Agent/source/infrastructure" sed -i "s/support@example.com/${SUB_EMAIL}/g" bin/config.ts + sed -i "s/provider: \"bedrock\"/provider: \"sagemaker\"/g" bin/config.ts + sed -i "s/name: \"amazon.titan-embed-text-v2\"/name: \"bce-embedding-and-bge-reranker\"/g" bin/config.ts + sed -i "s/commitId: \"\"/commitId: \"43972580a35ceacacd31b95b9f430f695d07dde9\"/g" bin/config.ts + sed -i "s/dimensions: 1024,/dimensions: 768/g" bin/config.ts + sed -i "s/default: true//g" bin/config.ts + npm i -g pnpm pnpm i + # echo "bin/config.ts >>>>>>>" + # cat bin/config.ts + npm run build npx cdk synth > synth.log 2>&1 if [ ${PIPESTATUS[0]} -ne 0 ]; then echo "cdk synth failed" diff --git a/source/script/build.sh b/source/script/build.sh index 982c193e3..bbd4ce447 100644 --- a/source/script/build.sh +++ b/source/script/build.sh @@ -4,6 +4,21 @@ set -e # Load config.json config_file="../infrastructure/bin/config.json" +# 检查 config.json 是否存在 +if [[ ! 
-f $config_file ]]; then + # 调用 TypeScript 文件中的方法获取返回值 + config_file=$(node -e "require('../infrastructure/bin/config.ts').getConfig()") +# knowledge_base_enabled=$(jq -r '.knowledgeBase.enabled' $config_file) +# knowledge_base_intelliagent_enabled=$(jq -r '.knowledgeBase.knowledgeBaseType.intelliAgentKb.enabled' $config_file) +# knowledge_base_models_enabled=$(jq -r '.knowledgeBase.knowledgeBaseType.intelliAgentKb.knowledgeBaseModel.enabled' $config_file) +# ecr_repository=$(jq -r '.knowledgeBase.knowledgeBaseType.intelliAgentKb.knowledgeBaseModel.ecrRepository' $config_file) +# ecr_image_tag=$(jq -r '.knowledgeBase.knowledgeBaseType.intelliAgentKb.knowledgeBaseModel.ecrImageTag' $config_file) +# opensearch_enabled=$(jq -r '.knowledgeBase.knowledgeBaseType.intelliAgentKb.vectorStore.opensearch.enabled' $config_file) +# embedding_model_provider=$(jq -r '.model.embeddingsModels[0].provider' $config_file) +# model_assets_bucket=$(jq -r '.model.modelConfig.modelAssetsBucket' $config_file) +# ui_enabled=$(jq -r '.ui.enabled' $config_file) +# else +fi knowledge_base_enabled=$(jq -r '.knowledgeBase.enabled' $config_file) knowledge_base_intelliagent_enabled=$(jq -r '.knowledgeBase.knowledgeBaseType.intelliAgentKb.enabled' $config_file) knowledge_base_models_enabled=$(jq -r '.knowledgeBase.knowledgeBaseType.intelliAgentKb.knowledgeBaseModel.enabled' $config_file) @@ -13,6 +28,7 @@ opensearch_enabled=$(jq -r '.knowledgeBase.knowledgeBaseType.intelliAgentKb.vect embedding_model_provider=$(jq -r '.model.embeddingsModels[0].provider' $config_file) model_assets_bucket=$(jq -r '.model.modelConfig.modelAssetsBucket' $config_file) ui_enabled=$(jq -r '.ui.enabled' $config_file) +# fi echo "Knowledge Base Enabled: $knowledge_base_enabled" echo "IntelliAgent Knowledge Base Enabled: $knowledge_base_intelliagent_enabled" From b8c0618a31705dddd36289159c235797a8237a04 Mon Sep 17 00:00:00 2001 From: Cui <530051970@qq.com> Date: Sun, 27 Oct 2024 19:40:53 +0800 Subject: [PATCH 030/110] 
update shell script --- api_test/buildspec-20241012.yaml | 9 ++--- api_test/config.json | 62 ++++++++++++++++++++++++++++++++ source/script/build.sh | 15 -------- 3 files changed, 65 insertions(+), 21 deletions(-) create mode 100644 api_test/config.json diff --git a/api_test/buildspec-20241012.yaml b/api_test/buildspec-20241012.yaml index 9a4f83716..6cc9092e4 100644 --- a/api_test/buildspec-20241012.yaml +++ b/api_test/buildspec-20241012.yaml @@ -66,12 +66,9 @@ phases: if [ -d "Intelli-Agent/source/infrastructure" ]; then echo "Synthesizing start..." pushd "Intelli-Agent/source/infrastructure" - sed -i "s/support@example.com/${SUB_EMAIL}/g" bin/config.ts - sed -i "s/provider: \"bedrock\"/provider: \"sagemaker\"/g" bin/config.ts - sed -i "s/name: \"amazon.titan-embed-text-v2\"/name: \"bce-embedding-and-bge-reranker\"/g" bin/config.ts - sed -i "s/commitId: \"\"/commitId: \"43972580a35ceacacd31b95b9f430f695d07dde9\"/g" bin/config.ts - sed -i "s/dimensions: 1024,/dimensions: 768/g" bin/config.ts - sed -i "s/default: true//g" bin/config.ts + sed -i "s#./bin/config.json#../../api_test/config.json#g" bin/config.ts + sed -i "s#./bin/config.json#../../api_test/config.json#g" cli/magic-config.ts + sed -i "s#../infrastructure/bin/config.json#../../api_test/config.json#g" ../script/build.sh npm i -g pnpm pnpm i # echo "bin/config.ts >>>>>>>" diff --git a/api_test/config.json b/api_test/config.json new file mode 100644 index 000000000..7fc376e5c --- /dev/null +++ b/api_test/config.json @@ -0,0 +1,62 @@ +{ + "prefix": "", + "email": "cuihubin@amazon.com", + "deployRegion": "us-east-1", + "knowledgeBase": { + "enabled": true, + "knowledgeBaseType": { + "intelliAgentKb": { + "enabled": true, + "vectorStore": { + "opensearch": { + "enabled": true, + "useCustomDomain": false, + "customDomainEndpoint": "" + } + }, + "knowledgeBaseModel": { + "enabled": true, + "ecrRepository": "intelli-agent-knowledge-base", + "ecrImageTag": "latest" + } + } + } + }, + "chat": { + "enabled": true, + 
"bedrockRegion": "us-east-1", + "amazonConnect": { + "enabled": true + } + }, + "model": { + "embeddingsModels": [ + { + "provider": "sagemaker", + "name": "bce-embedding-and-bge-reranker", + "commitId": "43972580a35ceacacd31b95b9f430f695d07dde9", + "dimensions": 768 + } + ], + "llms": [ + { + "provider": "bedrock", + "name": "anthropic.claude-3-sonnet-20240229-v1:0" + } + ], + "modelConfig": { + "modelAssetsBucket": "intelli-agent-models-544919262599-us-east-1" + } + }, + "ui": { + "enabled": true + }, + "federatedAuth": { + "enabled": true, + "provider": { + "cognito": { + "enabled": true + } + } + } +} \ No newline at end of file diff --git a/source/script/build.sh b/source/script/build.sh index bbd4ce447..16784dae3 100644 --- a/source/script/build.sh +++ b/source/script/build.sh @@ -4,21 +4,6 @@ set -e # Load config.json config_file="../infrastructure/bin/config.json" -# 检查 config.json 是否存在 -if [[ ! -f $config_file ]]; then - # 调用 TypeScript 文件中的方法获取返回值 - config_file=$(node -e "require('../infrastructure/bin/config.ts').getConfig()") -# knowledge_base_enabled=$(jq -r '.knowledgeBase.enabled' $config_file) -# knowledge_base_intelliagent_enabled=$(jq -r '.knowledgeBase.knowledgeBaseType.intelliAgentKb.enabled' $config_file) -# knowledge_base_models_enabled=$(jq -r '.knowledgeBase.knowledgeBaseType.intelliAgentKb.knowledgeBaseModel.enabled' $config_file) -# ecr_repository=$(jq -r '.knowledgeBase.knowledgeBaseType.intelliAgentKb.knowledgeBaseModel.ecrRepository' $config_file) -# ecr_image_tag=$(jq -r '.knowledgeBase.knowledgeBaseType.intelliAgentKb.knowledgeBaseModel.ecrImageTag' $config_file) -# opensearch_enabled=$(jq -r '.knowledgeBase.knowledgeBaseType.intelliAgentKb.vectorStore.opensearch.enabled' $config_file) -# embedding_model_provider=$(jq -r '.model.embeddingsModels[0].provider' $config_file) -# model_assets_bucket=$(jq -r '.model.modelConfig.modelAssetsBucket' $config_file) -# ui_enabled=$(jq -r '.ui.enabled' $config_file) -# else -fi 
knowledge_base_enabled=$(jq -r '.knowledgeBase.enabled' $config_file) knowledge_base_intelliagent_enabled=$(jq -r '.knowledgeBase.knowledgeBaseType.intelliAgentKb.enabled' $config_file) knowledge_base_models_enabled=$(jq -r '.knowledgeBase.knowledgeBaseType.intelliAgentKb.knowledgeBaseModel.enabled' $config_file) From a0acf0d27ef46db61f2a2e2b2be13872e20eb718 Mon Sep 17 00:00:00 2001 From: Cui <530051970@qq.com> Date: Sun, 27 Oct 2024 23:36:34 +0800 Subject: [PATCH 031/110] add use third kb pattern --- ...pec-20241012.yaml => buildspec-third.yaml} | 13 ++-- api_test/buildspec.yaml | 50 +++++++-------- api_test/config-third.json | 63 +++++++++++++++++++ 3 files changed, 95 insertions(+), 31 deletions(-) rename api_test/{buildspec-20241012.yaml => buildspec-third.yaml} (95%) create mode 100644 api_test/config-third.json diff --git a/api_test/buildspec-20241012.yaml b/api_test/buildspec-third.yaml similarity index 95% rename from api_test/buildspec-20241012.yaml rename to api_test/buildspec-third.yaml index 6cc9092e4..f7d408905 100644 --- a/api_test/buildspec-20241012.yaml +++ b/api_test/buildspec-third.yaml @@ -66,9 +66,10 @@ phases: if [ -d "Intelli-Agent/source/infrastructure" ]; then echo "Synthesizing start..." 
pushd "Intelli-Agent/source/infrastructure" - sed -i "s#./bin/config.json#../../api_test/config.json#g" bin/config.ts - sed -i "s#./bin/config.json#../../api_test/config.json#g" cli/magic-config.ts - sed -i "s#../infrastructure/bin/config.json#../../api_test/config.json#g" ../script/build.sh + sed -i "s#aws_region=$(aws configure get region)#aws_region=\"us-east-1\"#g" ../model/etl/code/model.sh + sed -i "s#./bin/config.json#../../api_test/config-third.json#g" bin/config.ts + sed -i "s#./bin/config.json#../../api_test/config-third.json#g" cli/magic-config.ts + sed -i "s#../infrastructure/bin/config.json#../../api_test/config-third.json#g" ../script/build.sh npm i -g pnpm pnpm i # echo "bin/config.ts >>>>>>>" @@ -194,9 +195,9 @@ phases: echo "----------------------------------------------------------------" report_datetime=$(date +"%Y-%m-%d_%H-%M-%S") project_name="Intelli-Agent" - original_filename="report.html" - original_json_filename="detail.json" - original_log="detail.log" + original_filename="report_third.html" + original_json_filename="detail_third.json" + original_log="detail_third.log" s3_key=${report_datetime}_${original_json_filename} log=${report_datetime}_${original_log} pushd Intelli-Agent/api_test || exit 1 diff --git a/api_test/buildspec.yaml b/api_test/buildspec.yaml index 46da4883f..3fcb47737 100644 --- a/api_test/buildspec.yaml +++ b/api_test/buildspec.yaml @@ -3,9 +3,9 @@ version: 0.2 env: variables: REPOSITORY_URL: "https://github.com/aws-samples/Intelli-Agent.git" - CODE_BRANCH: "dev" + CODE_BRANCH: "main" PROJECT_NAME: "Intelli-Agent" - STACK_NAME: "intelli-agent" + STACK_NAME: "ai-customer-service" DEPLOY_STACK: "cdk" CLEAN_RESOURCES: "no" TEST_FAST: "false" @@ -23,15 +23,15 @@ env: phases: install: commands: - - aws cloudformation delete-stack --stack-name "$STACK_NAME" + - aws cloudformation delete-stack --stack-name $STACK_NAME - | - aws cloudformation wait stack-delete-complete --stack-name "$STACK_NAME" + aws cloudformation wait 
stack-delete-complete --stack-name $STACK_NAME WAIT_STATUS=$? if [ $WAIT_STATUS -eq 0 ]; then echo "Stack deletion complete." else echo "Failed to delete stack." - reason_detail=$(aws cloudformation describe-stack-events --stack-name intelli-agent | jq '[.StackEvents[] | select(.ResourceType=="AWS::EC2::Subnet" and .ResourceStatus=="DELETE_FAILED")] | last') + reason_detail=$(aws cloudformation describe-stack-events --stack-name $STACK_NAME | jq '[.StackEvents[] | select(.ResourceType=="AWS::EC2::Subnet" and .ResourceStatus=="DELETE_FAILED")] | last') reason=$(echo $reason_detail | jq -r '.ResourceStatusReason') echo $reason | jq -R --arg reason "$reason" '{"error_msg":$reason}' > clear_resources.json cat clear_resources.json @@ -41,8 +41,8 @@ phases: echo $lambda_status if [ $lambda_status -eq 200 ]; then echo "Lambda executed successfully. Proceeding with stack deletion." - aws cloudformation delete-stack --stack-name "$STACK_NAME" - aws cloudformation wait stack-delete-complete --stack-name "$STACK_NAME" + aws cloudformation delete-stack --stack-name $STACK_NAME + aws cloudformation wait stack-delete-complete --stack-name $STACK_NAME if [ $? -eq 0 ]; then echo "Stack deletion complete after Lambda execution." else @@ -58,12 +58,6 @@ phases: echo "$install_start_time install start..." echo "----------------------------------------------------------------" git clone $REPOSITORY_URL --branch $CODE_BRANCH --single-branch - echo "build portal assets >>>" - pushd Intelli-Agent/source/portal - npm i -g pnpm - pnpm i - npm run build - popd deploy_start_time=$(date +"%Y-%m-%d_%H-%M-%S") echo "----------------------------------------------------------------" echo "$deploy_start_time deploy start..." @@ -72,11 +66,21 @@ phases: if [ -d "Intelli-Agent/source/infrastructure" ]; then echo "Synthesizing start..." 
pushd "Intelli-Agent/source/infrastructure" - npx cdk synth 2>&1 | tee synth.log + sed -i "s#aws_region=$(aws configure get region)#aws_region=\"us-east-1\"#g" ../model/etl/code/model.sh + sed -i "s#./bin/config.json#../../api_test/config.json#g" bin/config.ts + sed -i "s#./bin/config.json#../../api_test/config.json#g" cli/magic-config.ts + sed -i "s#../infrastructure/bin/config.json#../../api_test/config.json#g" ../script/build.sh + npm i -g pnpm + pnpm i + # echo "bin/config.ts >>>>>>>" + # cat bin/config.ts + npm run build + npx cdk synth > synth.log 2>&1 if [ ${PIPESTATUS[0]} -ne 0 ]; then echo "cdk synth failed" + # jq -n --arg error "$(cat synth.log)" '{detail: $error}' > sync_error.json jq -n --arg error "$(cat synth.log)" '{detail: $error, status: "unCompleted", build_url: $CODEBUILD_BUILD_URL, project_name: $PROJECT_NAME, topic: $TOPIC, repository: $REPOSITORY_URL, branch: $CODE_BRANCH}' > sync_error.json - aws lambda invoke --function-name $GEN_REPORT_FUNCTION --payload fileb://sync_error.json sync_error.json + aws lambda invoke --function-name $GEN_REPORT_FUNCTION --payload fileb://sync_error.json response.json exit 1 else echo "Synthesizing complete." @@ -87,17 +91,13 @@ phases: if [ -d "Intelli-Agent/source/infrastructure" ]; then pushd "Intelli-Agent/source/infrastructure" pnpm i - npx cdk deploy --parameters SubEmail=$SUB_EMAIL \ - --parameters S3ModelAssets="$S3_MODEL_ASSETS_BUCKET" \ - --parameters EtlImageName="$ETL_REPOSITORY_NAME" \ - --parameters ETLTag="$ETL_IMAGE_TAG" \ - --require-approval never + npx cdk deploy $STACK_NAME --require-approval never deploy_exit_code=$? if [ $deploy_exit_code -ne 0 ]; then echo "CDK deployment failed. Sending email and exiting with status code 1." 
- msg=$(aws cloudformation describe-stack-events --stack-name intelli-agent --query "StackEvents[?ResourceStatus=='CREATE_FAILED'] | [-1]") + msg=$(aws cloudformation describe-stack-events --stack-name $STACK_NAME --query "StackEvents[?ResourceStatus=='CREATE_FAILED'] | [-1]") if [ -z "$msg" ]; then - msg=$(aws cloudformation describe-stack-events --stack-name intelli-agent | jq '[.StackEvents[] | select(.ResourceType=="AWS::EC2::Subnet" and .ResourceStatus=="DELETE_FAILED")] | last'); + msg=$(aws cloudformation describe-stack-events --stack-name $STACK_NAME | jq '[.StackEvents[] | select(.ResourceType=="AWS::EC2::Subnet" and .ResourceStatus=="DELETE_FAILED")] | last'); fi echo "{\"project_name\":\"$PROJECT_NAME\",\"build_url\":\"$CODEBUILD_BUILD_URL\",\"status\":\"unCompleted\",\"detail\":\"$msg\",\"topic\":\"$TOPIC\",\"repository\":\"$REPOSITORY_URL\",\"branch\":\"$CODE_BRANCH\"}" > payload.json aws lambda invoke --function-name $GEN_REPORT_FUNCTION --payload fileb://payload.json response.json @@ -131,11 +131,11 @@ phases: pip3 install --upgrade pip pip3 --default-timeout=6000 install -r requirements.txt popd - stack_info=$(aws cloudformation describe-stacks --stack-name "$STACK_NAME") + stack_info=$(aws cloudformation describe-stacks --stack-name $STACK_NAME) rest_api_gateway_url=$(echo "$stack_info" | jq -r '.Stacks[0].Outputs[] | select(.OutputKey=="APIEndpointAddress").OutputValue') ws_api_gateway_url=$(echo "$stack_info" | jq -r '.Stacks[0].Outputs[] | select(.OutputKey=="WebSocketEndpointAddress").OutputValue') - user_pool_id=$(echo "$stack_info" | jq -r '.Stacks[0].Outputs[] | select(.OutputKey=="UserPoolId").OutputValue') - oidc_client_id=$(echo "$stack_info" | jq -r '.Stacks[0].Outputs[] | select(.OutputKey=="OidcClientId").OutputValue') + user_pool_id=$(echo "$stack_info" | jq -r '.Stacks[0].Outputs[] | select(.OutputKey=="UserPoolID").OutputValue') + oidc_client_id=$(echo "$stack_info" | jq -r '.Stacks[0].Outputs[] | 
select(.OutputKey=="OIDCClientID").OutputValue') aws cognito-idp admin-create-user \ --user-pool-id $user_pool_id \ --username lvning@amazon.com \ diff --git a/api_test/config-third.json b/api_test/config-third.json new file mode 100644 index 000000000..5d4e7debc --- /dev/null +++ b/api_test/config-third.json @@ -0,0 +1,63 @@ +{ + "prefix": "", + "email": "cuihubin@amazon.com", + "deployRegion": "us-east-1", + "knowledgeBase": { + "enabled": false, + "knowledgeBaseType": { + "intelliAgentKb": { + "enabled": true, + "vectorStore": { + "opensearch": { + "enabled": true, + "useCustomDomain": false, + "customDomainEndpoint": "" + } + }, + "knowledgeBaseModel": { + "enabled": true, + "ecrRepository": "intelli-agent-knowledge-base", + "ecrImageTag": "latest" + } + } + } + }, + "chat": { + "enabled": true, + "bedrockRegion": "us-east-1", + "amazonConnect": { + "enabled": true + } + }, + "model": { + "embeddingsModels": [ + { + "provider": "bedrock", + "name": "amazon.titan-embed-text-v2:0", + "commitId": "", + "dimensions": 1024, + "default": true + } + ], + "llms": [ + { + "provider": "bedrock", + "name": "anthropic.claude-3-sonnet-20240229-v1:0" + } + ], + "modelConfig": { + "modelAssetsBucket": "intelli-agent-models-456882501179-us-east-1" + } + }, + "ui": { + "enabled": true + }, + "federatedAuth": { + "enabled": true, + "provider": { + "cognito": { + "enabled": true + } + } + } +} \ No newline at end of file From 1754967f46e991c760e11cd2ad024723ce31166e Mon Sep 17 00:00:00 2001 From: Cui <530051970@qq.com> Date: Mon, 28 Oct 2024 10:09:34 +0800 Subject: [PATCH 032/110] update third config.json --- api_test/buildspec-third.yaml | 2 +- api_test/buildspec.yaml | 2 +- api_test/config-third.json | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/api_test/buildspec-third.yaml b/api_test/buildspec-third.yaml index f7d408905..0d10a63d5 100644 --- a/api_test/buildspec-third.yaml +++ b/api_test/buildspec-third.yaml @@ -207,7 +207,7 @@ phases: aws s3 cp 
$original_json_filename s3://${API_TEST_RESULT_BUCKET}/$s3_key aws s3 cp $original_log s3://${API_TEST_RESULT_BUCKET}/$log popd - echo "{\"project_name\":\"$PROJECT_NAME\",\"build_url\":\"$CODEBUILD_BUILD_URL\",\"status\":\"completed\",\"bucket\":\"$API_TEST_RESULT_BUCKET\",\"s3_key\":\"$s3_key\",\"log\":\"$log\",\"topic\":\"$TOPIC\",\"repository\":\"$REPOSITORY_URL\",\"branch\":\"$CODE_BRANCH\"}" > payload.json + echo "{\"project_name\":\"$PROJECT_NAME\",\"phase\":\"third\",\"build_url\":\"$CODEBUILD_BUILD_URL\",\"status\":\"completed\",\"bucket\":\"$API_TEST_RESULT_BUCKET\",\"s3_key\":\"$s3_key\",\"log\":\"$log\",\"topic\":\"$TOPIC\",\"repository\":\"$REPOSITORY_URL\",\"branch\":\"$CODE_BRANCH\"}" > payload.json aws lambda invoke --function-name $GEN_REPORT_FUNCTION --payload fileb://payload.json response.json - | post_build_end_time=$(date +"%Y-%m-%d_%H-%M-%S") diff --git a/api_test/buildspec.yaml b/api_test/buildspec.yaml index 3fcb47737..3c50283de 100644 --- a/api_test/buildspec.yaml +++ b/api_test/buildspec.yaml @@ -207,7 +207,7 @@ phases: aws s3 cp $original_json_filename s3://${API_TEST_RESULT_BUCKET}/$s3_key aws s3 cp $original_log s3://${API_TEST_RESULT_BUCKET}/$log popd - echo "{\"project_name\":\"$PROJECT_NAME\",\"build_url\":\"$CODEBUILD_BUILD_URL\",\"status\":\"completed\",\"bucket\":\"$API_TEST_RESULT_BUCKET\",\"s3_key\":\"$s3_key\",\"log\":\"$log\",\"topic\":\"$TOPIC\",\"repository\":\"$REPOSITORY_URL\",\"branch\":\"$CODE_BRANCH\"}" > payload.json + echo "{\"project_name\":\"$PROJECT_NAME\",\"phase\":\"kb\",\"build_url\":\"$CODEBUILD_BUILD_URL\",\"status\":\"completed\",\"bucket\":\"$API_TEST_RESULT_BUCKET\",\"s3_key\":\"$s3_key\",\"log\":\"$log\",\"topic\":\"$TOPIC\",\"repository\":\"$REPOSITORY_URL\",\"branch\":\"$CODE_BRANCH\"}" > payload.json aws lambda invoke --function-name $GEN_REPORT_FUNCTION --payload fileb://payload.json response.json - | post_build_end_time=$(date +"%Y-%m-%d_%H-%M-%S") diff --git a/api_test/config-third.json 
b/api_test/config-third.json index 5d4e7debc..bdc867e0f 100644 --- a/api_test/config-third.json +++ b/api_test/config-third.json @@ -1,5 +1,5 @@ { - "prefix": "", + "prefix": "third", "email": "cuihubin@amazon.com", "deployRegion": "us-east-1", "knowledgeBase": { From 0c5de4355a631dc4ef211beeb821e9faccc2ed32 Mon Sep 17 00:00:00 2001 From: Cui <530051970@qq.com> Date: Mon, 28 Oct 2024 10:24:10 +0800 Subject: [PATCH 033/110] add package remark-html --- source/portal/package-lock.json | 2 +- source/portal/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/source/portal/package-lock.json b/source/portal/package-lock.json index e0b64e9b9..fa83327bf 100644 --- a/source/portal/package-lock.json +++ b/source/portal/package-lock.json @@ -28,7 +28,7 @@ "react-spinners": "^0.13.8", "react-use-websocket": "^4.8.1", "remark-gfm": "^4.0.0", - "remark-html": "16.0.1", + "remark-html": "^16.0.1", "sass": "^1.74.1", "uuid": "^9.0.1" }, diff --git a/source/portal/package.json b/source/portal/package.json index 287fd7e8f..0a071d4ce 100644 --- a/source/portal/package.json +++ b/source/portal/package.json @@ -30,7 +30,7 @@ "react-spinners": "^0.13.8", "react-use-websocket": "^4.8.1", "remark-gfm": "^4.0.0", - "remark-html": "16.0.1", + "remark-html": "^16.0.1", "sass": "^1.74.1", "uuid": "^9.0.1" }, From 5ed6c89c0bd0bc2c7dc17cc4c713245f84906c92 Mon Sep 17 00:00:00 2001 From: NingLyu Date: Mon, 28 Oct 2024 04:00:38 +0000 Subject: [PATCH 034/110] feat: add exception handler --- .../online/common_logic/common_utils/response_utils.py | 2 +- source/lambda/online/lambda_main/main.py | 7 ++++--- .../lambda_main/main_utils/online_entries/common_entry.py | 3 +-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/source/lambda/online/common_logic/common_utils/response_utils.py b/source/lambda/online/common_logic/common_utils/response_utils.py index 4f8637942..5b94e8c15 100644 --- a/source/lambda/online/common_logic/common_utils/response_utils.py +++ 
b/source/lambda/online/common_logic/common_utils/response_utils.py @@ -71,7 +71,7 @@ def stream_response(event_body:dict, response:dict): ws_connection_id = event_body["ws_connection_id"] custom_message_id = event_body["custom_message_id"] answer = response["answer"] - figure = response.get("ddb_additional_kwargs").get("figure") + figure = response.get("ddb_additional_kwargs", {}).get("figure") if isinstance(answer, str): answer = iter([answer]) diff --git a/source/lambda/online/lambda_main/main.py b/source/lambda/online/lambda_main/main.py index f8dd1d83a..2905d0249 100644 --- a/source/lambda/online/lambda_main/main.py +++ b/source/lambda/online/lambda_main/main.py @@ -14,6 +14,7 @@ from common_logic.common_utils.logger_utils import get_logger from common_logic.common_utils.websocket_utils import load_ws_client from lambda_main.main_utils.online_entries import get_entry +from common_logic.common_utils.response_utils import process_response logger = get_logger("main") @@ -369,7 +370,7 @@ def lambda_handler(event_body: dict, context: dict): run_type = ExecutionType.WEBSOCKET_API return default_event_handler(event_body, context, entry_executor) except Exception as e: - if ExecutionType.WEBSOCKET_API == run_type: - pass - logger.error(f"An error occurred: {e}") + error_response = {"answer": str(e), "extra_response": {}} + process_response(event_body, error_response) + logger.error(f"An error occurred: {str(e)}") return {"error": str(e)} diff --git a/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py b/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py index d06cde0a5..e983147c8 100644 --- a/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py +++ b/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py @@ -136,7 +136,6 @@ def format_intention_output(data): kwargs = ', '.join( [f'{k}: {v}' for k, v in item.get('kwargs', {}).items()]) markdown_table += f"| {query} | {score} | 
{name} | {intent} | {kwargs} |\n" - logger.info(markdown_table) return markdown_table @@ -478,7 +477,7 @@ def register_rag_tool_from_config(event_body: dict): chatbot = chatbot_manager.get_chatbot(group_name, chatbot_id) logger.info(chatbot) for index_type, item_dict in chatbot.index_ids.items(): - if index_type != IndexType.INTENTION: + if index_type == IndexType.QD: for index_content in item_dict["value"].values(): if "indexId" in index_content and "description" in index_content: register_rag_tool( From 0b8ef518b7a267fc5d6dcb285cbe6a3d556e6695 Mon Sep 17 00:00:00 2001 From: Cui <530051970@qq.com> Date: Mon, 28 Oct 2024 13:27:17 +0800 Subject: [PATCH 035/110] bugfix: update kb condition --- source/infrastructure/lib/api/api-stack.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/source/infrastructure/lib/api/api-stack.ts b/source/infrastructure/lib/api/api-stack.ts index a0b99897e..254f608b4 100644 --- a/source/infrastructure/lib/api/api-stack.ts +++ b/source/infrastructure/lib/api/api-stack.ts @@ -141,7 +141,8 @@ export class ApiConstruct extends Construct { identitySources: [apigw.IdentitySource.header('Authorization')], }); - if (props.config.knowledgeBase.knowledgeBaseType.intelliAgentKb.enabled) { + // if (props.config.knowledgeBase.knowledgeBaseType.intelliAgentKb.enabled) { + if (props.config.knowledgeBase.enabled && props.config.knowledgeBase.knowledgeBaseType.intelliAgentKb.enabled) { const embeddingLambda = new LambdaFunction(this, "lambdaEmbedding", { code: Code.fromAsset(join(__dirname, "../../../lambda/embedding")), vpc: vpc, From da15f669007368dbca42a422b91a027c93782b1d Mon Sep 17 00:00:00 2001 From: NingLyu Date: Mon, 28 Oct 2024 08:33:02 +0000 Subject: [PATCH 036/110] fix: bcembedding issue --- source/lambda/online/lambda_main/main.py | 4 ---- source/model/bce_embedding/code/requirements.txt | 3 ++- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/source/lambda/online/lambda_main/main.py 
b/source/lambda/online/lambda_main/main.py index 2905d0249..12934ebb0 100644 --- a/source/lambda/online/lambda_main/main.py +++ b/source/lambda/online/lambda_main/main.py @@ -352,22 +352,18 @@ def default_event_handler(event_body: dict, context: dict, entry_executor): @chatbot_lambda_call_wrapper def lambda_handler(event_body: dict, context: dict): logger.info(f"Raw event_body: {event_body}") - run_type = ExecutionType.RESTFUL_API entry_type = event_body.get("entry_type", EntryType.COMMON).lower() try: entry_executor = get_entry(entry_type) stream = context["stream"] if event_body.get("source", "") == "aws.cases": # Amazon Connect case event - run_type = ExecutionType.AMAZON_CONNECT return connect_case_event_handler(event_body, context, entry_executor) elif not stream: # Restful API - run_type = ExecutionType.RESTFUL_API return restapi_event_handler(event_body, context, entry_executor) else: # WebSocket API - run_type = ExecutionType.WEBSOCKET_API return default_event_handler(event_body, context, entry_executor) except Exception as e: error_response = {"answer": str(e), "extra_response": {}} diff --git a/source/model/bce_embedding/code/requirements.txt b/source/model/bce_embedding/code/requirements.txt index 00969b63a..091ec6134 100644 --- a/source/model/bce_embedding/code/requirements.txt +++ b/source/model/bce_embedding/code/requirements.txt @@ -1,2 +1,3 @@ FlagEmbedding==1.2.5 -BCEmbedding==0.1.3 \ No newline at end of file +BCEmbedding==0.1.3 +transformers<4.46.0 From 5bf868f2a3695851df6a008b76cf2510095b4b9a Mon Sep 17 00:00:00 2001 From: NingLyu Date: Mon, 28 Oct 2024 08:37:33 +0000 Subject: [PATCH 037/110] chore: remove unused code --- source/lambda/online/common_logic/common_utils/constant.py | 6 ------ source/lambda/online/lambda_main/main.py | 2 +- 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/source/lambda/online/common_logic/common_utils/constant.py b/source/lambda/online/common_logic/common_utils/constant.py index 6872a9a95..518d35daf 
100644 --- a/source/lambda/online/common_logic/common_utils/constant.py +++ b/source/lambda/online/common_logic/common_utils/constant.py @@ -171,9 +171,3 @@ class IndexTag(Enum): @unique class KBType(Enum): AOS = "aos" - - -class ExecutionType(Enum): - AMAZON_CONNECT = "amazon_connect" - RESTFUL_API = "restful_api" - WEBSOCKET_API = "websocket_api" diff --git a/source/lambda/online/lambda_main/main.py b/source/lambda/online/lambda_main/main.py index 12934ebb0..379b8906d 100644 --- a/source/lambda/online/lambda_main/main.py +++ b/source/lambda/online/lambda_main/main.py @@ -5,7 +5,7 @@ import boto3 from botocore.exceptions import ClientError -from common_logic.common_utils.constant import EntryType, ExecutionType +from common_logic.common_utils.constant import EntryType from common_logic.common_utils.ddb_utils import DynamoDBChatMessageHistory from common_logic.common_utils.lambda_invoke_utils import ( chatbot_lambda_call_wrapper, From b00de062f66e13704b47e0a723add324eacb17f9 Mon Sep 17 00:00:00 2001 From: NingLyu Date: Mon, 28 Oct 2024 08:45:50 +0000 Subject: [PATCH 038/110] chore: format code --- source/lambda/online/lambda_main/main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/source/lambda/online/lambda_main/main.py b/source/lambda/online/lambda_main/main.py index 379b8906d..aca2226e6 100644 --- a/source/lambda/online/lambda_main/main.py +++ b/source/lambda/online/lambda_main/main.py @@ -5,7 +5,7 @@ import boto3 from botocore.exceptions import ClientError -from common_logic.common_utils.constant import EntryType +from common_logic.common_utils.constant import EntryType from common_logic.common_utils.ddb_utils import DynamoDBChatMessageHistory from common_logic.common_utils.lambda_invoke_utils import ( chatbot_lambda_call_wrapper, From fca430bf85b75e5753b655ada18a9a3dc5170fe9 Mon Sep 17 00:00:00 2001 From: Cui <530051970@qq.com> Date: Mon, 28 Oct 2024 16:49:57 +0800 Subject: [PATCH 039/110] update third config --- 
api_test/buildspec-third.yaml | 4 ++-- source/infrastructure/bin/main.ts | 5 ++++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/api_test/buildspec-third.yaml b/api_test/buildspec-third.yaml index 0d10a63d5..06a4a7010 100644 --- a/api_test/buildspec-third.yaml +++ b/api_test/buildspec-third.yaml @@ -5,7 +5,7 @@ env: REPOSITORY_URL: "https://github.com/aws-samples/Intelli-Agent.git" CODE_BRANCH: "main" PROJECT_NAME: "Intelli-Agent" - STACK_NAME: "ai-customer-service" + STACK_NAME: "third-ai-customer-service" DEPLOY_STACK: "cdk" CLEAN_RESOURCES: "no" TEST_FAST: "false" @@ -180,7 +180,7 @@ phases: cat .env source agentVenv/bin/activate # pytest test_case --continue-on-collection-errors --log-cli-level=INFO - pytest test_case --continue-on-collection-errors --log-cli-level=INFO --json-report --json-report-file=detail.json --html=report.html --self-contained-html > detail.log + pytest test_case --continue-on-collection-errors --log-cli-level=INFO --json-report --json-report-file=detail_third.json --html=report_third.html --self-contained-html > detail_third.log popd test_complete_time=$(date +"%Y-%m-%d_%H-%M-%S") echo "----------------------------------------------------------------" diff --git a/source/infrastructure/bin/main.ts b/source/infrastructure/bin/main.ts index b7bee8f25..55e19322d 100644 --- a/source/infrastructure/bin/main.ts +++ b/source/infrastructure/bin/main.ts @@ -148,7 +148,10 @@ const devEnv = { }; const app = new App(); -const stackName = `${config.prefix}ai-customer-service`; +let stackName = "ai-customer-service" +if(config.prefix && config.prefix.trim().length > 0){ + stackName = `${config.prefix}-ai-customer-service`; +} new RootStack(app, stackName, { config, env: devEnv, suppressTemplateIndentation: true }); app.synth(); From 186f193fc2c750a44e0b0058a2e5056dc8bf3eb9 Mon Sep 17 00:00:00 2001 From: NingLyu Date: Mon, 28 Oct 2024 08:54:41 +0000 Subject: [PATCH 040/110] fix: fix transformer issue --- 
source/model/bce_embedding/code/requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/source/model/bce_embedding/code/requirements.txt b/source/model/bce_embedding/code/requirements.txt index 00969b63a..091ec6134 100644 --- a/source/model/bce_embedding/code/requirements.txt +++ b/source/model/bce_embedding/code/requirements.txt @@ -1,2 +1,3 @@ FlagEmbedding==1.2.5 -BCEmbedding==0.1.3 \ No newline at end of file +BCEmbedding==0.1.3 +transformers<4.46.0 From 2d0e9cb0a416b8ad653a3a7750c0150491886ac8 Mon Sep 17 00:00:00 2001 From: NingLyu Date: Tue, 29 Oct 2024 01:01:52 +0000 Subject: [PATCH 041/110] fix: add qq in retriever --- .../lambda_main/main_utils/online_entries/common_entry.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py b/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py index e983147c8..020be7f09 100644 --- a/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py +++ b/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py @@ -477,7 +477,7 @@ def register_rag_tool_from_config(event_body: dict): chatbot = chatbot_manager.get_chatbot(group_name, chatbot_id) logger.info(chatbot) for index_type, item_dict in chatbot.index_ids.items(): - if index_type == IndexType.QD: + if index_type != IndexType.INTENTION: for index_content in item_dict["value"].values(): if "indexId" in index_content and "description" in index_content: register_rag_tool( From dc25e4fc3d62ab007f8435de26e7e6afc11d2e47 Mon Sep 17 00:00:00 2001 From: NingLyu Date: Tue, 29 Oct 2024 01:44:07 +0000 Subject: [PATCH 042/110] feat: add score and source in rag trace --- .../functions/lambda_common_tools/rag.py | 35 ++++++++++++++----- 1 file changed, 27 insertions(+), 8 deletions(-) diff --git a/source/lambda/online/functions/lambda_common_tools/rag.py b/source/lambda/online/functions/lambda_common_tools/rag.py index 
05791454e..cbab78db7 100644 --- a/source/lambda/online/functions/lambda_common_tools/rag.py +++ b/source/lambda/online/functions/lambda_common_tools/rag.py @@ -6,23 +6,41 @@ from common_logic.common_utils.lambda_invoke_utils import send_trace -def format_rag_data(data): +def _generate_markdown_link(file_path: str) -> str: + file_name = file_path.split("/")[-1] + markdown_link = f"[{file_name}]({file_path})" + return markdown_link + + +def format_rag_data(data) -> str: + """ + Formats the given data into a markdown table. + + Args: + data (list): A list of dictionaries containing 'source', 'score', and 'page_content' keys. + + Returns: + str: A markdown table string representing the formatted data. + """ if data is None or len(data) == 0: return "" - markdown_table = "| RAG Context |\n" - markdown_table += "|-----|\n" + markdown_table = "| Source | Score | RAG Context |\n" + markdown_table += "|-----|-----|-----|\n" for item in data: - item = item.replace("\n", "
") - markdown_table += f"| {item} |\n" + source = _generate_markdown_link(item.get("source", "")) + score = item.get("score", -1) + page_content = item.get("page_content", "").replace("\n", "
") + markdown_table += f"| {source} | {score} | {page_content} |\n" return markdown_table def lambda_handler(event_body, context=None): state = event_body['state'] + print(event_body) context_list = [] - # add qq match results + # Add qq match results context_list.extend(state['qq_match_results']) figure_list = [] retriever_params = state["chatbot_config"]["private_knowledge_config"] @@ -34,6 +52,8 @@ def lambda_handler(event_body, context=None): lambda_module_path="functions.functions_utils.retriever.retriever", handler_name="lambda_handler", ) + print("RAG debug") + print(output) for doc in output["result"]["docs"]: context_list.append(doc["page_content"]) @@ -44,7 +64,7 @@ def lambda_handler(event_body, context=None): unique_figure_list = [dict(t) for t in unique_set] state['extra_response']['figures'] = unique_figure_list - context_md = format_rag_data(context_list) + context_md = format_rag_data(output["result"]["docs"]) send_trace( f"\n\n{context_md}\n\n", enable_trace=state["enable_trace"]) @@ -77,6 +97,5 @@ def lambda_handler(event_body, context=None): }, }, ) - # return {"code": 0, "result": output} From 57ea7174ee2c03d1647d58e1684b7a337796f686 Mon Sep 17 00:00:00 2001 From: NingLyu Date: Tue, 29 Oct 2024 07:28:33 +0000 Subject: [PATCH 043/110] feat: add details in monitor --- .../common_utils/monitor_utils.py | 111 ++++++++++++++++++ .../functions/lambda_common_tools/rag.py | 37 +----- .../main_utils/online_entries/common_entry.py | 47 +------- .../src/pages/chatbot/components/Message.css | 2 + 4 files changed, 122 insertions(+), 75 deletions(-) create mode 100644 source/lambda/online/common_logic/common_utils/monitor_utils.py diff --git a/source/lambda/online/common_logic/common_utils/monitor_utils.py b/source/lambda/online/common_logic/common_utils/monitor_utils.py new file mode 100644 index 000000000..7d75a34c6 --- /dev/null +++ b/source/lambda/online/common_logic/common_utils/monitor_utils.py @@ -0,0 +1,111 @@ +import logging + + +logger = 
logging.getLogger() +logger.setLevel(logging.INFO) + + +def _generate_markdown_link(file_path: str) -> str: + file_name = file_path.split("/")[-1] + markdown_link = f"[{file_name}]({file_path})" + return markdown_link + + +def format_qq_data(data) -> str: + """ + Formats QQ match result. + + Args: + data (list): A list of dictionaries containing 'source', 'score', and 'page_content' keys. + + Returns: + str: A markdown table string representing the formatted data. + """ + if data is None or len(data) == 0: + return "" + + markdown_table = "**QQ Match Result**\n" + markdown_table += "| Source | Score | Question | Answer |\n" + markdown_table += "|-----|-----|-----|-----|\n" + + for qq_item in data: + qq_source = _generate_markdown_link(qq_item.get("source", "")) + qq_score = qq_item.get("score", -1) + qq_question = qq_item.get("page_content", "").replace("\n", "
") + qq_answer = qq_item.get("answer", "").replace("\n", "
") + markdown_table += f"| {qq_source} | {qq_score} | {qq_question} | {qq_answer} |\n" + + return markdown_table + + +def format_rag_data(data, qq_result) -> str: + """ + Formats the given data into a markdown table. + + Args: + data (list): A list of dictionaries containing 'source', 'score', and 'page_content' keys. + qq_result (list): QQ match result + + Returns: + str: A markdown table string representing the formatted data. + """ + if data is None or len(data) == 0: + return "" + + markdown_table = "| Source | Score | RAG Context |\n" + markdown_table += "|-----|-----|-----|\n" + for item in data: + source = _generate_markdown_link(item.get("source", "")) + score = item.get("score", -1) + page_content = item.get("page_content", "").replace("\n", "
") + markdown_table += f"| {source} | {score} | {page_content} |\n\n" + + markdown_table += "**QQ Match Result**\n" + markdown_table += "| Source | Score | Question | Answer |\n" + markdown_table += "|-----|-----|-----|-----|\n" + + for qq_item in qq_result: + qq_source = _generate_markdown_link(qq_item.get("source", "")) + qq_score = qq_item.get("score", -1) + qq_question = qq_item.get("page_content", "").replace("\n", "
") + qq_answer = qq_item.get("answer", "").replace("\n", "
") + markdown_table += f"| {qq_source} | {qq_score} | {qq_question} | {qq_answer} |\n" + + return markdown_table + + +def is_null_or_empty(value): + if value is None: + return True + elif isinstance(value, (dict, list, str)) and not value: + return True + return False + + +def format_preprocess_output(ori_query, rewrite_query): + if is_null_or_empty(ori_query) or is_null_or_empty(rewrite_query): + return "" + + markdown_table = "| Original Query | Rewritten Query |\n" + markdown_table += "|-------|-------|\n" + markdown_table += f"| {ori_query} | {rewrite_query} |\n" + + return markdown_table + + +def format_intention_output(data): + if is_null_or_empty(data): + return "" + + markdown_table = "| Query | Score | Name | Intent | Additional Info |\n" + markdown_table += "|-------|-------|-------|-------|-------|\n" + for item in data: + query = item.get("query", "") + score = item.get("score", "") + name = item.get("name", "") + intent = item.get("intent", "") + kwargs = ', '.join( + [f'{k}: {v}' for k, v in item.get('kwargs', {}).items()]) + markdown_table += f"| {query} | {score} | {name} | {intent} | {kwargs} |\n" + + return markdown_table diff --git a/source/lambda/online/functions/lambda_common_tools/rag.py b/source/lambda/online/functions/lambda_common_tools/rag.py index cbab78db7..01170fabd 100644 --- a/source/lambda/online/functions/lambda_common_tools/rag.py +++ b/source/lambda/online/functions/lambda_common_tools/rag.py @@ -4,44 +4,15 @@ LLMTaskType ) from common_logic.common_utils.lambda_invoke_utils import send_trace - - -def _generate_markdown_link(file_path: str) -> str: - file_name = file_path.split("/")[-1] - markdown_link = f"[{file_name}]({file_path})" - return markdown_link - - -def format_rag_data(data) -> str: - """ - Formats the given data into a markdown table. - - Args: - data (list): A list of dictionaries containing 'source', 'score', and 'page_content' keys. - - Returns: - str: A markdown table string representing the formatted data. 
- """ - if data is None or len(data) == 0: - return "" - - markdown_table = "| Source | Score | RAG Context |\n" - markdown_table += "|-----|-----|-----|\n" - for item in data: - source = _generate_markdown_link(item.get("source", "")) - score = item.get("score", -1) - page_content = item.get("page_content", "").replace("\n", "
") - markdown_table += f"| {source} | {score} | {page_content} |\n" - - return markdown_table +from common_logic.common_utils.monitor_utils import format_rag_data def lambda_handler(event_body, context=None): - state = event_body['state'] + state = event_body["state"] print(event_body) context_list = [] # Add qq match results - context_list.extend(state['qq_match_results']) + context_list.extend(state["qq_match_results"]) figure_list = [] retriever_params = state["chatbot_config"]["private_knowledge_config"] retriever_params["query"] = state[retriever_params.get( @@ -64,7 +35,7 @@ def lambda_handler(event_body, context=None): unique_figure_list = [dict(t) for t in unique_set] state['extra_response']['figures'] = unique_figure_list - context_md = format_rag_data(output["result"]["docs"]) + context_md = format_rag_data(output["result"]["docs"], state["qq_match_contexts"]) send_trace( f"\n\n{context_md}\n\n", enable_trace=state["enable_trace"]) diff --git a/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py b/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py index 020be7f09..ea618f864 100644 --- a/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py +++ b/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py @@ -1,4 +1,3 @@ -import json from typing import Annotated, Any, TypedDict from common_logic.common_utils.chatbot_utils import ChatbotManager @@ -20,6 +19,7 @@ from common_logic.common_utils.python_utils import add_messages, update_nest_dict from common_logic.common_utils.response_utils import process_response from common_logic.common_utils.serialization_utils import JSONEncoder +from common_logic.common_utils.monitor_utils import format_intention_output, format_preprocess_output, format_qq_data from functions import get_tool_by_name from functions._tool_base import tool_manager from functions.lambda_common_tools import rag @@ -79,6 +79,7 @@ class ChatbotState(TypedDict): 
########### retriever states ########### # contexts information retrieved in search engine, e.g. OpenSearch qq_match_results: list = [] + qq_match_contexts: dict contexts: str = None figure: list = None @@ -103,42 +104,6 @@ class ChatbotState(TypedDict): current_agent_tools_def: list -def is_null_or_empty(value): - if value is None: - return True - elif isinstance(value, (dict, list, str)) and not value: - return True - return False - - -def format_preprocess_output(ori_query, rewrite_query): - if is_null_or_empty(ori_query) or is_null_or_empty(rewrite_query): - return "" - - markdown_table = "| Original Query | Rewritten Query |\n" - markdown_table += "|-------|-------|\n" - markdown_table += f"| {ori_query} | {rewrite_query} |\n" - - return markdown_table - - -def format_intention_output(data): - if is_null_or_empty(data): - return "" - - markdown_table = "| Query | Score | Name | Intent | Additional Info |\n" - markdown_table += "|-------|-------|-------|-------|-------|\n" - for item in data: - query = item.get("query", "") - score = item.get("score", "") - name = item.get("name", "") - intent = item.get("intent", "") - kwargs = ', '.join( - [f'{k}: {v}' for k, v in item.get('kwargs', {}).items()]) - markdown_table += f"| {query} | {score} | {name} | {intent} | {kwargs} |\n" - - return markdown_table - #################### # nodes in graph # #################### @@ -160,10 +125,6 @@ def query_preprocess(state: ChatbotState): @node_monitor_wrapper def intention_detection(state: ChatbotState): - # if state['chatbot_config']['agent_config']['only_use_rag_tool']: - # return { - # "intent_type": "intention detected" - # } retriever_params = state["chatbot_config"]["qq_match_config"] retriever_params["query"] = state[ retriever_params.get("retriever_config", {}).get("query_key", "query") @@ -178,8 +139,9 @@ def intention_detection(state: ChatbotState): qq_match_threshold = retriever_params["threshold"] for doc in output["result"]["docs"]: if doc["retrieval_score"] > 
qq_match_threshold: + doc_md = format_qq_data(doc) send_trace( - f"\n\n**similar query found**\n{doc}", + f"\n\n**similar query found**\n\n{doc_md}", state["stream"], state["ws_connection_id"], state["enable_trace"], @@ -219,6 +181,7 @@ def intention_detection(state: ChatbotState): "intent_fewshot_examples": intent_fewshot_examples, "intent_fewshot_tools": intent_fewshot_tools, "qq_match_results": context_list, + "qq_match_contexts": output["result"]["docs"], "intent_type": "intention detected", } diff --git a/source/portal/src/pages/chatbot/components/Message.css b/source/portal/src/pages/chatbot/components/Message.css index f943489ba..753ec9f52 100644 --- a/source/portal/src/pages/chatbot/components/Message.css +++ b/source/portal/src/pages/chatbot/components/Message.css @@ -14,9 +14,11 @@ background-color: #f2f2f2; padding: 8px; border: 1px solid #ddd; + min-width: 120px; } .custom-table-cell { padding: 8px; border: 1px solid #ddd; + min-width: 120px; } \ No newline at end of file From b3c299c2f2f3ff589d8c29c2273ec5bf2d2fc238 Mon Sep 17 00:00:00 2001 From: Cui <530051970@qq.com> Date: Tue, 29 Oct 2024 16:17:08 +0800 Subject: [PATCH 044/110] update gen report lambda --- api_test/buildspec-third.yaml | 15 +++--- api_test/buildspec.yaml | 17 +++--- api_test/gen-report-lambda.py | 97 +++++++++++++++++++++++------------ 3 files changed, 83 insertions(+), 46 deletions(-) diff --git a/api_test/buildspec-third.yaml b/api_test/buildspec-third.yaml index 06a4a7010..6b00583b0 100644 --- a/api_test/buildspec-third.yaml +++ b/api_test/buildspec-third.yaml @@ -23,6 +23,7 @@ env: phases: install: commands: + - export REPORT_DATE=$(date +"%Y-%m-%d") - aws cloudformation delete-stack --stack-name $STACK_NAME - | aws cloudformation wait stack-delete-complete --stack-name $STACK_NAME @@ -34,7 +35,6 @@ phases: reason_detail=$(aws cloudformation describe-stack-events --stack-name $STACK_NAME | jq '[.StackEvents[] | select(.ResourceType=="AWS::EC2::Subnet" and 
.ResourceStatus=="DELETE_FAILED")] | last') reason=$(echo $reason_detail | jq -r '.ResourceStatusReason') echo $reason | jq -R --arg reason "$reason" '{"error_msg":$reason}' > clear_resources.json - cat clear_resources.json aws lambda invoke --function-name $CLEAR_RESOURCES_FUNCTION --payload fileb://clear_resources.json response.json cat response.json lambda_status=$(jq -r '.statusCode' < response.json) @@ -66,6 +66,7 @@ phases: if [ -d "Intelli-Agent/source/infrastructure" ]; then echo "Synthesizing start..." pushd "Intelli-Agent/source/infrastructure" + sed -i "s#{Aws.ACCOUNT_ID}#{Aws.ACCOUNT_ID}third#g" lib/user/user-construct.ts sed -i "s#aws_region=$(aws configure get region)#aws_region=\"us-east-1\"#g" ../model/etl/code/model.sh sed -i "s#./bin/config.json#../../api_test/config-third.json#g" bin/config.ts sed -i "s#./bin/config.json#../../api_test/config-third.json#g" cli/magic-config.ts @@ -193,22 +194,24 @@ phases: echo "----------------------------------------------------------------" echo "$post_build_start_time post build start..." 
echo "----------------------------------------------------------------" - report_datetime=$(date +"%Y-%m-%d_%H-%M-%S") project_name="Intelli-Agent" original_filename="report_third.html" original_json_filename="detail_third.json" original_log="detail_third.log" - s3_key=${report_datetime}_${original_json_filename} - log=${report_datetime}_${original_log} + original_payload="payload_third.json" + s3_key=${REPORT_DATE}_${original_json_filename} + log=${REPORT_DATE}_${original_log} + third_payload=${REPORT_DATE}_${original_payload} pushd Intelli-Agent/api_test || exit 1 pwd ls -l - aws s3 cp $original_filename s3://${API_TEST_RESULT_BUCKET}/${report_datetime}_${original_filename} + aws s3 cp $original_filename s3://${API_TEST_RESULT_BUCKET}/${REPORT_DATE}_${original_filename} aws s3 cp $original_json_filename s3://${API_TEST_RESULT_BUCKET}/$s3_key aws s3 cp $original_log s3://${API_TEST_RESULT_BUCKET}/$log popd echo "{\"project_name\":\"$PROJECT_NAME\",\"phase\":\"third\",\"build_url\":\"$CODEBUILD_BUILD_URL\",\"status\":\"completed\",\"bucket\":\"$API_TEST_RESULT_BUCKET\",\"s3_key\":\"$s3_key\",\"log\":\"$log\",\"topic\":\"$TOPIC\",\"repository\":\"$REPOSITORY_URL\",\"branch\":\"$CODE_BRANCH\"}" > payload.json - aws lambda invoke --function-name $GEN_REPORT_FUNCTION --payload fileb://payload.json response.json + aws s3 cp payload.json s3://$API_TEST_RESULT_BUCKET/$third_payload + # aws lambda invoke --function-name $GEN_REPORT_FUNCTION --payload fileb://payload.json response.json - | post_build_end_time=$(date +"%Y-%m-%d_%H-%M-%S") echo "----------------------------------------------------------------" diff --git a/api_test/buildspec.yaml b/api_test/buildspec.yaml index 3c50283de..575134cea 100644 --- a/api_test/buildspec.yaml +++ b/api_test/buildspec.yaml @@ -23,6 +23,7 @@ env: phases: install: commands: + - export REPORT_DATE=$(date +"%Y-%m-%d") - aws cloudformation delete-stack --stack-name $STACK_NAME - | aws cloudformation wait stack-delete-complete --stack-name 
$STACK_NAME @@ -193,22 +194,26 @@ phases: echo "----------------------------------------------------------------" echo "$post_build_start_time post build start..." echo "----------------------------------------------------------------" - report_datetime=$(date +"%Y-%m-%d_%H-%M-%S") project_name="Intelli-Agent" original_filename="report.html" original_json_filename="detail.json" original_log="detail.log" - s3_key=${report_datetime}_${original_json_filename} - log=${report_datetime}_${original_log} + original_payload="payload.json" + s3_key=${REPORT_DATE}_${original_json_filename} + log=${REPORT_DATE}_${original_log} + payload=${REPORT_DATE}_${original_payload} pushd Intelli-Agent/api_test || exit 1 pwd ls -l - aws s3 cp $original_filename s3://${API_TEST_RESULT_BUCKET}/${report_datetime}_${original_filename} + aws s3 cp $original_filename s3://${API_TEST_RESULT_BUCKET}/${REPORT_DATE}_${original_filename} aws s3 cp $original_json_filename s3://${API_TEST_RESULT_BUCKET}/$s3_key aws s3 cp $original_log s3://${API_TEST_RESULT_BUCKET}/$log popd - echo "{\"project_name\":\"$PROJECT_NAME\",\"phase\":\"kb\",\"build_url\":\"$CODEBUILD_BUILD_URL\",\"status\":\"completed\",\"bucket\":\"$API_TEST_RESULT_BUCKET\",\"s3_key\":\"$s3_key\",\"log\":\"$log\",\"topic\":\"$TOPIC\",\"repository\":\"$REPOSITORY_URL\",\"branch\":\"$CODE_BRANCH\"}" > payload.json - aws lambda invoke --function-name $GEN_REPORT_FUNCTION --payload fileb://payload.json response.json + echo "{\"project_name\":\"$PROJECT_NAME\",\"phase\":\"kb\",\"build_url\":\"$CODEBUILD_BUILD_URL\",\"status\":\"completed\",\"s3_key\":\"$s3_key\",\"log\":\"$log\",\"topic\":\"$TOPIC\",\"repository\":\"$REPOSITORY_URL\",\"branch\":\"$CODE_BRANCH\"}" > payload.json + aws s3 cp payload.json s3://$API_TEST_RESULT_BUCKET/$payload + echo "{\"bucket\":\"$API_TEST_RESULT_BUCKET\",\"date\":\"$REPORT_DATE\"}" > res.json + echo "{\"bucket\":\"$API_TEST_RESULT_BUCKET\",\"date\":\"$REPORT_DATE\"}" + aws lambda invoke --function-name 
$GEN_REPORT_FUNCTION --payload fileb://res.json response.json - | post_build_end_time=$(date +"%Y-%m-%d_%H-%M-%S") echo "----------------------------------------------------------------" diff --git a/api_test/gen-report-lambda.py b/api_test/gen-report-lambda.py index 53d8124c6..ce1987be2 100644 --- a/api_test/gen-report-lambda.py +++ b/api_test/gen-report-lambda.py @@ -1,13 +1,12 @@ import json -from datetime import datetime import boto3 - -def __gen_completed_report(event): - s3_client = boto3.client('s3') - repository = event['repository'] if 'repository' in event else '-' - branch = event['branch'] if 'branch' in event else '-' - response = s3_client.get_object(Bucket=event['bucket'], Key=event['s3_key']) - log_response = s3_client.get_object(Bucket=event['bucket'], Key=event['log']) +__s3_client = boto3.client('s3') +def __gen_completed_message(bucket: str, date: str, payload_type: int): + detail_key=f"{date}_detail_third.json" if payload_type == 0 else f"{date}_detail.json" + log_key=f"{date}_detail_third.log" if payload_type == 0 else f"{date}_detail.log" + response = __s3_client.get_object(Bucket=bucket, Key=detail_key) + log_response = __s3_client.get_object(Bucket=bucket, Key=log_key) + message = '【BuiltIn KB】:\n' if payload_type == 1 else '【Third KB】:\n' content = log_response['Body'].read().decode('utf-8') target_substring = "=================================== FAILURES ===================================" end_target_substring = "=============================== warnings summary ===============================" @@ -47,48 +46,78 @@ def __gen_completed_report(event): passed_str += "\n\n" if passed != 0 else "None\n\n" failed_str += "\n\n" if failed != 0 else "None\n\n" error_str += "\n\n" if error != 0 else "None\n\n" - - status = "FAILED" if (failed + error) > 0 else "PASSED" - date_str = datetime.now().strftime('%Y-%m-%d') - total=passed+failed+error - if total==0: - coverage='-' - else: - coverage=passed/total - message = f"Hi, team!\nThe following is 
API autotest report for {date_str}.\n\n ============================ summary =============================\n REPOSITORY: {repository}\n BRANCH: {branch}\n TEST RESULT: {status}\n Total:{passed + failed + error} Passed:{passed} Failed:{failed} Error:{error}\n Coverage:{coverage}\n\n\n " message+= passed_str message+= failed_str message+= error_str message+="\n\n" message+="=========================== failures log =============================\n" message+=result_content - message+="\n ..." - message+=f"\n\n More details click: {event['build_url']}" - message+="\n\nBR.\nThanks" - # Publish to SNS - __send_report(event['topic'], f"[{event['project_name']}][{date_str}][{status}] API AutoTest Report", message) + message+="\n ...\n\n\n" + return passed, failed, error, message + -def __gen_uncompleted_report(event): - status = "DEPLOY: FAILED" - message = "Hi, team!\nThe stack deploy FAILED! The reason for the failure is as follows:" +def __gen_uncompleted_message(payload, payload_type): + message = '【BuiltIn KB】:\n' if payload_type == 1 else '【Third KB】:\n' + message+= "The stack deploy FAILED! The reason for the failure is as follows:" message+="\n\n" - message+=event['detail'] + message+=payload['detail'] message+="\n ..." - message+=f"\n\n More details click: {event['build_url']}" - message+="\n\nBR.\nThanks" - __send_report(event['topic'], f"[{event['project_name']}][{datetime.now().strftime('%Y-%m-%d')}][FAILED!] API AutoTest Report", message) + return message + # __send_report(event['topic'], f"[{event['project_name']}][{datetime.now().strftime('%Y-%m-%d')}][FAILED!] 
API AutoTest Report", message) def __send_report(topic, subject, message): sns_client = boto3.client('sns') sns_client.publish(TopicArn=topic, Subject=subject, Message=message) +def __gen_json_from_s3(bucket: str, date: str, keyword: str, payload_type: int): + keywords = keyword.split(".") + key=f"{date}_{keywords[0]}_third.{keywords[1]}" if payload_type == 0 else f"{date}_{keyword}" + return json.loads(__s3_client.get_object(Bucket=bucket, Key=key)['Body'].read().decode('utf-8')) + + +def lambda_handler(event, context): + bucket=event['bucket'] + date = event['date'] + status = "FAILED" + passed = 0 + failed = 0 + error = 0 + coverage='-' + third_passed = 0 + third_failed = 0 + third_error = 0 + third_coverage = '-' + # event={'project_name': 'Chatbot Portal with Agent', 'build_url': 'https://ap-northeast-1.console.aws.amazon.com/codebuild/home?region=ap-northeast-1#/builds/AgentApiTest:9d97a692-cc2c-4372-8538-58a192735f13/view/new', 'status': 'completed', 'bucket': 'intelli-agent-rag-ap-northeast-1-api-test', 's3_key': '2024-06-30_13-21-21_detail.json', 'log': '2024-06-30_13-21-21_detail.log', 'topic': 'arn:aws:sns:ap-northeast-1:544919262599:agent-developers'} + third_payload = __gen_json_from_s3(bucket, date, "payload.json", 0) + + payload = __gen_json_from_s3(bucket, date, "payload.json", 1) + + if payload.get('status ')=='completed' and third_payload.get('status')=='completed': + status = "PASSED" + if payload['status'] == 'completed': + passed, failed, error, msg = __gen_completed_message(bucket, date, 1) + total=passed + failed + error + if total != 0: + coverage = passed/total + else: + msg = __gen_uncompleted_message(payload, 1) -def lambda_handler(event,context): - # event={'project_name': 'Chatbot Portal with Agent', 'build_url': 'https://ap-northeast-1.console.aws.amazon.com/codebuild/home?region=ap-northeast-1#/builds/AgentApiTest:9d97a692-cc2c-4372-8538-58a192735f13/view/new', 'status': 'completed', 'bucket': 
'intelli-agent-rag-ap-northeast-1-api-test', 's3_key': '2024-06-30_13-21-21_detail.json', 'log': '2024-06-30_13-21-21_detail.log', 'topic': 'arn:aws:sns:ap-northeast-1:544919262599:agent-developers'} - if event['status'] == 'completed': - __gen_completed_report(event) + if third_payload['status'] == 'completed': + third_passed, third_failed, third_error, third_msg = __gen_completed_message(bucket, date, 0) + third_total = third_passed + third_failed + third_error + if third_total != 0: + third_coverage = third_passed/third_total else: - __gen_uncompleted_report(event) + third_msg = __gen_uncompleted_message(payload, 0) + + message = f"Hi, team!\nThe following is API autotest report for {date}.\n\n ============================ summary =============================\n REPOSITORY: {payload['repository']}\n BRANCH: {payload['branch']}\n TEST RESULT: {status}\n Built-In KB Total:{passed + failed + error} Passed:{passed} Failed:{failed} Error:{error}\n Coverage:{coverage}\n Third KB Total:{third_passed + third_failed + third_error} Passed:{third_passed} Failed:{third_failed} Error:{third_error}\n Coverage:{third_coverage}\n\n\n " + message += msg + message += third_msg + message+=f"\n\n More details click:\n Built-in KB: {payload['build_url']}\n Third KB: {third_payload['build_url']}" + message+="\n\nBR.\nThanks" + + # Publish to SNS + __send_report(payload['topic'], f"[{payload['project_name']}][{date}][{status}] API AutoTest Report", message) return { 'statusCode': 200, From a4c99df4ecbb43755f1d75ed833cea6edbbb399c Mon Sep 17 00:00:00 2001 From: zhouxss Date: Tue, 29 Oct 2024 14:26:45 +0000 Subject: [PATCH 045/110] complete the tool refactor, next to test --- .../common_logic/common_utils/prompt_utils.py | 84 ++- .../functions_utils/retriever/retriever.py | 1 - .../online_entries/common_entry_v2.py | 678 ++++++++++++++++++ .../online/langchain_integration/__init__.py | 0 .../chains/chat_chain.py | 13 + .../chains/conversation_summary_chain.py | 20 + 
.../chains/tool_calling_chain_api.py | 158 ++++ .../chains/tool_calling_chain_claude_api.py | 320 --------- .../langchain_integration/tools/__init__.py | 63 +- .../tools/common_tools/__init__.py | 121 ---- .../tools/common_tools/rag.py | 11 +- 11 files changed, 1003 insertions(+), 466 deletions(-) create mode 100644 source/lambda/online/lambda_main/main_utils/online_entries/common_entry_v2.py create mode 100644 source/lambda/online/langchain_integration/__init__.py create mode 100644 source/lambda/online/langchain_integration/chains/tool_calling_chain_api.py delete mode 100644 source/lambda/online/langchain_integration/chains/tool_calling_chain_claude_api.py delete mode 100644 source/lambda/online/langchain_integration/tools/common_tools/__init__.py diff --git a/source/lambda/online/common_logic/common_utils/prompt_utils.py b/source/lambda/online/common_logic/common_utils/prompt_utils.py index f2a9c3f56..644411a6b 100644 --- a/source/lambda/online/common_logic/common_utils/prompt_utils.py +++ b/source/lambda/online/common_logic/common_utils/prompt_utils.py @@ -361,7 +361,19 @@ def prompt_template_render(self, prompt_template: dict): ) ################# api agent prompt ##################### -AGENT_USER_PROMPT = "你是一个AI助理。今天是{date},{weekday}. " +AGENT_SYSTEM_PROMPT = """\ +You are a helpful AI assistant. Today is {date},{weekday}. +Here are some guidelines for you: + +- Always start each answer with a reflection and write the reflection process in the tag. Please follow the steps below to think about it: + 1. Determine whether the current context is sufficient to answer the user's question. + 2. If the current context is sufficient to answer the user's question, call the `give_final_response` tool. + 3. If the current context is not sufficient to answer the user's question, you can consider calling the provided tools. + 4. 
If the parameters of the tool you call do not meet the requirements, call the `give_rhetorical_question` tool to ask the user for more information. If the tool does not require parameters, do not call the `give_rhetorical_question` tool. + 5. Finally, output the name of the tool you want to call. +- Always output with the same language as the content within . If the content is english, use english to output. If the content is chinese, use chinese to output. +""" + register_prompt_templates( model_ids=[ LLMModelType.CLAUDE_3_HAIKU, @@ -372,20 +384,62 @@ def prompt_template_render(self, prompt_template: dict): LLMModelType.COHERE_COMMAND_R_PLUS, ], task_type=LLMTaskType.TOOL_CALLING_API, - prompt_template=AGENT_USER_PROMPT, - prompt_name="user_prompt" + prompt_template=AGENT_SYSTEM_PROMPT, + prompt_name="agent_system_prompt" ) -AGENT_GUIDELINES_PROMPT = """ -- 每次回答总是先进行思考,并将思考过程写在标签中。请你按照下面的步骤进行思考: - 1. 判断根据当前的上下文是否足够回答用户的问题。 - 2. 如果当前的上下文足够回答用户的问题,请调用 `give_final_response` 工具。 - 3. 如果当前的上下文不能支持回答用户的问题,你可以考虑调用提供的工具。 - 4. 如果调用工具对应的参数不够,请调用反问工具 `give_rhetorical_question` 来让用户提供更加充分的信息。如果调用工具不需要参数,则不需要调用反问工具。 - 5. 最后给出你要调用的工具名称。 -- Always output with the same language as user's query. If the content is english, use englisth to output. If the content is Chinese, use Chinese to output. - -""" +# AGENT_GUIDELINES_PROMPT = """ +# - 每次回答总是先进行思考,并将思考过程写在标签中。请你按照下面的步骤进行思考:。 +# 2. 如果当前的上下文足够回答用户的问题,请调用 `give_final_response` 工具。 +# 3. 如果当前的上下文不能支持回答用户的问题,你可以考虑调用提供的工具。 +# 4. 如果调用工具对应的参数不够,请调用反问工具 `give_rhetorical_question` 来让用户提供更加充分的信息。如果调用工具不需要参数,则不需要调用反问工具。 +# 5. 最后给出你要调用的工具名称。 +# - Always output with the same language as user's query. If the content is english, use englisth to output. If the content is Chinese, use Chinese to output. 
+# """ +# register_prompt_templates( +# model_ids=[ +# LLMModelType.CLAUDE_3_HAIKU, +# LLMModelType.CLAUDE_3_SONNET, +# LLMModelType.CLAUDE_3_5_SONNET, +# LLMModelType.LLAMA3_1_70B_INSTRUCT, +# LLMModelType.MISTRAL_LARGE_2407, +# LLMModelType.COHERE_COMMAND_R_PLUS, +# ], +# task_type=LLMTaskType.TOOL_CALLING_API, +# prompt_template=AGENT_USER_PROMPT, +# prompt_name="agent_prompt" +# ) + +# AGENT_GUIDELINES_PROMPT = """ +# - 每次回答总是先进行思考,并将思考过程写在标签中。请你按照下面的步骤进行思考: +# 1. 判断根据当前的上下文是否足够回答用户的问题。 +# 2. 如果当前的上下文足够回答用户的问题,请调用 `give_final_response` 工具。 +# 3. 如果当前的上下文不能支持回答用户的问题,你可以考虑调用提供的工具。 +# 4. 如果调用工具对应的参数不够,请调用反问工具 `give_rhetorical_question` 来让用户提供更加充分的信息。如果调用工具不需要参数,则不需要调用反问工具。 +# 5. 最后给出你要调用的工具名称。 +# - Always output with the same language as user's query. If the content is english, use englisth to output. If the content is Chinese, use Chinese to output. +# +# """ + +# register_prompt_templates( +# model_ids=[ +# LLMModelType.CLAUDE_2, +# LLMModelType.CLAUDE_21, +# LLMModelType.CLAUDE_3_HAIKU, +# LLMModelType.CLAUDE_3_SONNET, +# LLMModelType.CLAUDE_3_5_SONNET, +# LLMModelType.LLAMA3_1_70B_INSTRUCT, +# LLMModelType.MISTRAL_LARGE_2407, +# LLMModelType.COHERE_COMMAND_R_PLUS, +# ], +# task_type=LLMTaskType.TOOL_CALLING_API, +# prompt_template=AGENT_GUIDELINES_PROMPT, +# prompt_name="guidelines_prompt" +# ) + +TOOL_FEWSHOT_PROMPT = """\ +Input: {query} +Args: {args}""" register_prompt_templates( model_ids=[ @@ -399,8 +453,8 @@ def prompt_template_render(self, prompt_template: dict): LLMModelType.COHERE_COMMAND_R_PLUS, ], task_type=LLMTaskType.TOOL_CALLING_API, - prompt_template=AGENT_GUIDELINES_PROMPT, - prompt_name="guidelines_prompt" + prompt_template=TOOL_FEWSHOT_PROMPT, + prompt_name="tool_fewshot_prompt" ) diff --git a/source/lambda/online/functions/functions_utils/retriever/retriever.py b/source/lambda/online/functions/functions_utils/retriever/retriever.py index 086006e08..694f8fbdd 100644 --- a/source/lambda/online/functions/functions_utils/retriever/retriever.py 
+++ b/source/lambda/online/functions/functions_utils/retriever/retriever.py @@ -1,6 +1,5 @@ import json import os - os.environ["PYTHONUNBUFFERED"] = "1" import logging import sys diff --git a/source/lambda/online/lambda_main/main_utils/online_entries/common_entry_v2.py b/source/lambda/online/lambda_main/main_utils/online_entries/common_entry_v2.py new file mode 100644 index 000000000..e05aedcb1 --- /dev/null +++ b/source/lambda/online/lambda_main/main_utils/online_entries/common_entry_v2.py @@ -0,0 +1,678 @@ +import json +from typing import Annotated, Any, TypedDict, List +import copy + +from common_logic.common_utils.chatbot_utils import ChatbotManager +from common_logic.common_utils.constant import ( + ChatbotMode, + IndexType, + LLMTaskType, + SceneType, + ToolRuningMode, +) +from common_logic.common_utils.lambda_invoke_utils import ( + invoke_lambda, + is_running_local, + node_monitor_wrapper, + send_trace, +) +from langchain_core.messages import ToolMessage,AIMessage +from common_logic.common_utils.logger_utils import get_logger +from common_logic.common_utils.prompt_utils import get_prompt_templates_from_ddb +from common_logic.common_utils.python_utils import add_messages, update_nest_dict +from common_logic.common_utils.response_utils import process_response +from common_logic.common_utils.serialization_utils import JSONEncoder +from langchain_integration.tools import ToolManager +from langchain_core.tools import BaseTool +from langchain_core.messages.tool import ToolCall +from langgraph.prebuilt import ToolNode +from langchain_integration.chains import LLMChain + + +# from lambda_main.main_utils.online_entries.agent_base import ( +# build_agent_graph, +# tool_execution, +# ) +from lambda_main.main_utils.parse_config import CommonConfigParser +from langgraph.graph import END, StateGraph +from langchain_integration.langgraph_integration import set_currrent_app + +logger = get_logger("common_entry") + + +class ChatbotState(TypedDict): + ########### 
input/output states ########### + # inputs + # origin event body + event_body: dict + # origianl input question + query: str + # chat history between human and agent + chat_history: Annotated[list[dict], add_messages] + # complete chatbot config, consumed by all the nodes + chatbot_config: dict + # websocket connection id for the agent + ws_connection_id: str + # whether to enbale stream output via ws_connection_id + stream: bool + # message id related to original input question + message_id: str = None + # record running states of different nodes + trace_infos: Annotated[list[str], add_messages] + # whether to enbale trace info update via streaming ouput + enable_trace: bool + # outputs + # final answer generated by whole app graph + answer: Any + # information needed return to user, e.g. intention, context, figure and so on, anything you can get during execution + extra_response: Annotated[dict, update_nest_dict] + # addition kwargs which need to save into ddb + ddb_additional_kwargs: dict + # response of entire app + app_response: Any + + ########### query rewrite states ########### + # query rewrite results + query_rewrite: str = None + + ########### intention detection states ########### + # intention type of retrieved intention samples in search engine, e.g. OpenSearch + intent_type: str = None + # retrieved intention samples in search engine, e.g. OpenSearch + intent_fewshot_examples: list + # tools of retrieved intention samples in search engine, e.g. OpenSearch + intent_fewshot_tools: list + + ########### retriever states ########### + # contexts information retrieved in search engine, e.g. 
OpenSearch + qq_match_results: list = [] + contexts: str = None + figure: list = None + + ########### agent states ########### + # current output of agent + # agent_current_output: dict + # # record messages during agent tool choose and calling, including agent message, tool ouput and error messages + agent_tool_history: Annotated[List[AIMessage | ToolMessage], add_messages] + # # the maximum number that agent node can be called + # agent_repeated_call_limit: int + # # the current call time of agent + # agent_current_call_number: int # + # # whehter the current call time is less than maximum number of agent call + # agent_repeated_call_validation: bool + # # function calling + # # whether the output of agent can be parsed as the valid tool calling + # function_calling_parse_ok: bool + # # whether the current parsed tool calling is run once + tool_calling_is_run_once: bool + # # current tool calls + # function_calling_parsed_tool_calls: list + # current_agent_tools_def: list + last_tool_messages: List[ToolMessage] + tools: List[BaseTool] + # the global rag tool use all knowledge + all_knowledge_rag_tool: BaseTool + + +def is_null_or_empty(value): + if value is None: + return True + elif isinstance(value, (dict, list, str)) and not value: + return True + return False + + +def format_intention_output(data): + if is_null_or_empty(data): + return "" + + markdown_table = "| Query | Score | Name | Intent | Additional Info |\n" + markdown_table += "|----------------------|-------|------------|-------------|----------------------|\n" + for item in data: + query = item.get("query", "") + score = item.get("score", "") + name = item.get("name", "") + intent = item.get("intent", "") + kwargs = ', '.join([f'{k}: {v}' for k, v in item.get('kwargs', {}).items()]) + markdown_table += f"| {query} | {score} | {name} | {intent} | {kwargs} |\n" + logger.info(markdown_table) + + return markdown_table + +#################### +# nodes in graph # +#################### + + 
+@node_monitor_wrapper +def query_preprocess(state: ChatbotState): + output: str = invoke_lambda( + event_body=state, + lambda_name="Online_Query_Preprocess", + lambda_module_path="lambda_query_preprocess.query_preprocess", + handler_name="lambda_handler", + ) + + send_trace(f"\n**query rewrite:** {output}\n**origin query:** {state['query']}") + return {"query_rewrite": output} + + +@node_monitor_wrapper +def intention_detection(state: ChatbotState): + # if state['chatbot_config']['agent_config']['only_use_rag_tool']: + # return { + # "intent_type": "intention detected" + # } + retriever_params = state["chatbot_config"]["qq_match_config"] + retriever_params["query"] = state[ + retriever_params.get("retriever_config", {}).get("query_key", "query") + ] + output: str = invoke_lambda( + event_body=retriever_params, + lambda_name="Online_Functions", + lambda_module_path="functions.functions_utils.retriever.retriever", + handler_name="lambda_handler", + ) + context_list = [] + qq_match_threshold = retriever_params["threshold"] + for doc in output["result"]["docs"]: + if doc["retrieval_score"] > qq_match_threshold: + send_trace( + f"\n\n**similar query found**\n{doc}", + state["stream"], + state["ws_connection_id"], + state["enable_trace"], + ) + query_content = doc["answer"] + # query_content = doc['answer']['jsonlAnswer'] + return { + "answer": query_content, + "intent_type": "similar query found", + } + question = doc["question"] + answer = doc["answer"] + context_list.append(f"问题: {question}, \n答案:{answer}") + + if state["chatbot_config"]["agent_config"]["only_use_rag_tool"]: + return {"qq_match_results": context_list, "intent_type": "intention detected"} + + intent_fewshot_examples = invoke_lambda( + lambda_module_path="lambda_intention_detection.intention", + lambda_name="Online_Intention_Detection", + handler_name="lambda_handler", + event_body=state, + ) + + intent_fewshot_tools: list[str] = list( + set([e["intent"] for e in intent_fewshot_examples]) + ) + + 
markdown_table = format_intention_output(intent_fewshot_examples) + send_trace( + f"**intention retrieved:**\n\n {markdown_table}", + state["stream"], + state["ws_connection_id"], + state["enable_trace"], + ) + return { + "intent_fewshot_examples": intent_fewshot_examples, + "intent_fewshot_tools": intent_fewshot_tools, + "qq_match_results": context_list, + "intent_type": "intention detected", + } + + +@node_monitor_wrapper +def agent(state: ChatbotState): + # two cases to invoke rag function + # 1. when valid intention fewshot found + # 2. for the first time, agent decides to give final results + + # deal with once tool calling + last_tool_messages = state["last_tool_messages"] + if last_tool_messages and len(last_tool_messages) == 1: + last_tool_message = last_tool_messages[0] + tool:BaseTool = ToolManager.get_tool( + scene=SceneType.COMMON, + name=last_tool_message.name + ) + if tool.return_direct: + send_trace("once tool", enable_trace=state["enable_trace"]) + return {"answer": last_tool_message.content, "tool_calling_is_run_once": True} + + # tool_execute_res = last_tool_calls_results[-1].additional_kwargs[ + # "raw_tool_call_results" + # ][0] + # tool_name = tool_execute_res["name"] + # output = tool_execute_res["output"] + # tool = get_tool_by_name(tool_name, scene=SceneType.COMMON) + # if tool.running_mode == ToolRuningMode.ONCE: + # send_trace("once tool", enable_trace=state["enable_trace"]) + # return {"answer": output["result"], "tool_calling_is_run_once": True} + + + + # if state["agent_tool_history"] and state["agent_tool_history"][-1].type=="tool_call": + # tool_execute_res = state["agent_tool_history"][-1]["additional_kwargs"][ + # "raw_tool_call_results" + # ][0] + # tool_name = tool_execute_res["name"] + # output = tool_execute_res["output"] + # tool = get_tool_by_name(tool_name, scene=SceneType.COMMON) + # if tool.running_mode == ToolRuningMode.ONCE: + # send_trace("once tool", enable_trace=state["enable_trace"]) + # return {"answer": 
output["result"], "tool_calling_is_run_once": True} + + no_intention_condition = not state["intent_fewshot_examples"] + # first_tool_final_response = False + # if ( + # (state["agent_current_call_number"] == 1) + # and state["function_calling_parse_ok"] + # and state["agent_tool_history"] + # ): + # tool_execute_res = state["agent_tool_history"][-1]["additional_kwargs"][ + # "raw_tool_call_results" + # ][0] + # tool_name = tool_execute_res["name"] + # if tool_name == "give_final_response": + # first_tool_final_response = True + + if ( + no_intention_condition + # or first_tool_final_response + or state["chatbot_config"]["agent_config"]["only_use_rag_tool"] + ): + if state["chatbot_config"]["agent_config"]["only_use_rag_tool"]: + send_trace("agent only use rag tool", enable_trace=state["enable_trace"]) + elif no_intention_condition: + send_trace( + "no_intention_condition, switch to rag tool", + enable_trace=state["enable_trace"], + ) + # elif first_tool_final_response: + # send_trace( + # "first tool is final response, switch to rag tool", + # enable_trace=state["enable_trace"], + # ) + + all_knowledge_rag_tool = state['all_knowledge_rag_tool'] + return AIMessage(content="",tool_calls=[ + ToolCall( + name=all_knowledge_rag_tool.name, + args={} + ) + ]) + + # response = app_agent.invoke(state) + + # normal call + agent_config = state["chatbot_config"]['agent_config'] + tools_name = state['intent_fewshot_tools'] + agent_config['tools'] + # get tools from tool names + tools = [ + ToolManager.get_tool( + scene=SceneType.COMMON, + name=name + ) + for name in tools_name + ] + llm_config = { + **agent_config['llm_config'], + "tools": tools, + "fewshot_examples": state['intent_fewshot_examples'], + } + group_name = state['chatbot_config']['group_name'] + chatbot_id = state['chatbot_config']['chatbot_id'] + prompt_templates_from_ddb = get_prompt_templates_from_ddb( + group_name, + model_id = llm_config['model_id'], + task_type=LLMTaskType.TOOL_CALLING_API, + 
chatbot_id=chatbot_id + ) + llm_config.update(**prompt_templates_from_ddb) + + tool_calling_chain = LLMChain.get_chain( + intent_type=LLMTaskType.TOOL_CALLING_API, + scene=SceneType.COMMON, + **llm_config + ) + agent_message:AIMessage = tool_calling_chain.invoke(**state) + send_trace( + # f"\n\n**agent_current_output:** \n{agent_message}\n\n **agent_current_call_number:** {agent_current_call_number}", + f"\n\n**agent_current_output:** \n{agent_message}\n\n", + state["stream"], + state["ws_connection_id"] + ) + + return {"agent_tool_history":[agent_message],"tools":tools} + + +@node_monitor_wrapper +def llm_direct_results_generation(state: ChatbotState): + group_name = state["chatbot_config"]["group_name"] + llm_config = state["chatbot_config"]["chat_config"] + task_type = LLMTaskType.CHAT + + prompt_templates_from_ddb = get_prompt_templates_from_ddb( + group_name, model_id=llm_config["model_id"], task_type=task_type + ) + logger.info(prompt_templates_from_ddb) + + answer: dict = invoke_lambda( + event_body={ + "llm_config": { + **llm_config, + "stream": state["stream"], + "intent_type": task_type, + **prompt_templates_from_ddb, + }, + "llm_input": { + "query": state["query"], + "chat_history": state["chat_history"], + }, + }, + lambda_name="Online_LLM_Generate", + lambda_module_path="lambda_llm_generate.llm_generate", + handler_name="lambda_handler", + ) + return {"answer": answer} + + +@node_monitor_wrapper +def tool_execution(state): + """executor lambda + Args: + state (NestUpdateState): _description_ + + Returns: + _type_: _description_ + """ + tools:List[BaseTool] = state['tools'] + tool_node = ToolNode(tools) + last_agent_message:AIMessage = state["agent_tool_history"][-1] + + # pass state to tools if needed + tools_map = {tool.name:tool for tool in tools} + tool_calls:List[ToolCall] = copy.deepcopy(last_agent_message.tool_calls) + + for tool_call in tool_calls: + tool = tools_map[tool_call.name] + if tool.pass_state: + 
tool_call.args.update({tool.pass_state_name:state}) + + tool_messages:List[ToolMessage] = tool_node.invoke( + [AIMessage(content="",tool_calls=tool_calls)] + ) + + # tool_calls = state['function_calling_parsed_tool_calls'] + # assert len(tool_calls) == 1, tool_calls + # tool_call_results = [] + # for tool_call in tool_calls: + # tool_name = tool_call["name"] + # tool_kwargs = tool_call['kwargs'] + # # call tool + # output = invoke_lambda( + # event_body = { + # "tool_name":tool_name, + # "state":state, + # "kwargs":tool_kwargs + # }, + # lambda_name="Online_Tool_Execute", + # lambda_module_path="functions.lambda_tool", + # handler_name="lambda_handler" + # ) + # tool_call_results.append({ + # "name": tool_name, + # "output": output, + # "kwargs": tool_call['kwargs'], + # "model_id": tool_call['model_id'] + # }) + + # output = format_tool_call_results(tool_call['model_id'],tool_call_results) + send_trace(f'**tool_execute_res:** \n{tool_messages}', enable_trace=state["enable_trace"]) + return { + "agent_tool_history": tool_messages, + "last_tool_messages": tool_messages + } + + +def final_results_preparation(state: ChatbotState): + app_response = process_response(state["event_body"], state) + return {"app_response": app_response} + + +def matched_query_return(state: ChatbotState): + return {"answer": state["answer"]} + + +################ +# define edges # +################ + + +def query_route(state: dict): + return f"{state['chatbot_config']['chatbot_mode']} mode" + + +def intent_route(state: dict): + return state["intent_type"] + + +def agent_route(state: dict): + if state.get("tool_calling_is_run_once", False): + return "no need tool calling" + + # state["agent_repeated_call_validation"] = ( + # state["agent_current_call_number"] < state["agent_repeated_call_limit"] + # ) + + if state["agent_repeated_call_validation"]: + return "valid tool calling" + else: + # TODO give final strategy + raise RuntimeError + + +############################# +# define online 
top-level graph for app # +############################# + + +def build_graph(chatbot_state_cls): + workflow = StateGraph(chatbot_state_cls) + + # add node for all chat/rag/agent mode + workflow.add_node("query_preprocess", query_preprocess) + # chat mode + workflow.add_node("llm_direct_results_generation", llm_direct_results_generation) + # rag mode + # workflow.add_node("knowledge_retrieve", knowledge_retrieve) + # workflow.add_node("llm_rag_results_generation", llm_rag_results_generation) + # agent mode + workflow.add_node("intention_detection", intention_detection) + workflow.add_node("matched_query_return", matched_query_return) + # agent sub graph + workflow.add_node("agent", agent) + workflow.add_node("tools_execution", tool_execution) + workflow.add_node("final_results_preparation", final_results_preparation) + + # add all edges + workflow.set_entry_point("query_preprocess") + # chat mode + workflow.add_edge("llm_direct_results_generation", "final_results_preparation") + # rag mode + # workflow.add_edge("knowledge_retrieve", "llm_rag_results_generation") + # workflow.add_edge("llm_rag_results_generation", END) + # agent mode + workflow.add_edge("tools_execution", "agent") + workflow.add_edge("matched_query_return", "final_results_preparation") + workflow.add_edge("final_results_preparation", END) + + # add conditional edges + # choose running mode based on user selection: + # 1. chat mode: let llm generate results directly + # 2. rag mode: retrive all knowledge and let llm generate results + # 3. agent mode: let llm generate results based on intention detection, tool calling and retrieved knowledge + workflow.add_conditional_edges( + "query_preprocess", + query_route, + { + "chat mode": "llm_direct_results_generation", + "agent mode": "intention_detection", + }, + ) + + # three running branch will be chosen based on intention detection results: + # 1. 
similar query found: if very similar queries were found in knowledge base, these queries will be given as results + # 2. intention detected: if intention detected, the agent logic will be invoked + workflow.add_conditional_edges( + "intention_detection", + intent_route, + { + "similar query found": "matched_query_return", + "intention detected": "agent", + }, + ) + + # the results of agent planning will be evaluated and decide next step: + # 1. valid tool calling: the agent chooses the valid tools, and the tools will be executed + # 2. no need tool calling: the agent thinks no tool needs to be called, the final results can be generated + workflow.add_conditional_edges( + "agent", + agent_route, + { + "valid tool calling": "tools_execution", + "no need tool calling": "final_results_preparation", + }, + ) + + app = workflow.compile() + return app + + +##################################### +# define online sub-graph for agent # +##################################### +# app_agent = None +app = None + + +# def register_rag_tool( +# name: str, +# description: str, +# scene=SceneType.COMMON, +# lambda_name: str = "lambda_common_tools", +# ): +# tool_manager.register_tool( +# { +# "name": name, +# "scene": scene, +# "lambda_name": lambda_name, +# "lambda_module_path": rag.lambda_handler, +# "tool_def": { +# "name": name, +# "description": description, +# }, +# "running_mode": ToolRuningMode.ONCE, +# } +# ) + +def register_rag_tool_from_config(event_body: dict): + group_name = event_body.get("chatbot_config").get("group_name", "Admin") + chatbot_id = event_body.get("chatbot_config").get("chatbot_id", "admin") + chatbot_manager = ChatbotManager.from_environ() + chatbot = chatbot_manager.get_chatbot(group_name, chatbot_id) + logger.info(chatbot) + for index_type, item_dict in chatbot.index_ids.items(): + if index_type != IndexType.INTENTION: + for index_content in item_dict["value"].values(): + if "indexId" in index_content and "description" in index_content: + # TODO give 
specific retriever config + ToolManager.register_common_rag_tool( + retriever_config=event_body["chatbot_config"]["private_knowledge_config"], + name=index_content["indexId"], + scene=SceneType.COMMON, + description=index_content["description"], + pass_state=True, + pass_state_name='state' + ) + + +def common_entry(event_body): + """ + Entry point for the Lambda function. + :param event_body: The event body for lambda function. + return: answer(str) + """ + global app, app_agent + if app is None: + app = build_graph(ChatbotState) + + # if app_agent is None: + # app_agent = build_agent_graph(ChatbotState) + + # debuging + if is_running_local(): + with open("common_entry_workflow.png", "wb") as f: + f.write(app.get_graph().draw_mermaid_png()) + + # with open("common_entry_agent_workflow.png", "wb") as f: + # f.write(app_agent.get_graph().draw_mermaid_png()) + + ################################################################################ + # prepare inputs and invoke graph + event_body["chatbot_config"] = CommonConfigParser.from_chatbot_config( + event_body["chatbot_config"] + ) + logger.info(event_body) + chatbot_config = event_body["chatbot_config"] + query = event_body["query"] + use_history = chatbot_config["use_history"] + chat_history = event_body["chat_history"] if use_history else [] + stream = event_body["stream"] + message_id = event_body["custom_message_id"] + ws_connection_id = event_body["ws_connection_id"] + enable_trace = chatbot_config["enable_trace"] + + # register as rag tool for each aos index + register_rag_tool_from_config(event_body) + + # define all knowledge rag tool + all_knowledge_rag_tool = ToolManager.register_common_rag_tool( + retriever_config=event_body["chatbot_config"]["private_knowledge_config"], + name="all_knowledge_rag_tool", + scene=SceneType.COMMON, + description="all knowledge rag tool", + pass_state=True, + pass_state_name='state' + ) + + # invoke graph and get results + response = app.invoke( + { + "event_body": 
event_body, + "stream": stream, + "chatbot_config": chatbot_config, + "query": query, + "enable_trace": enable_trace, + "trace_infos": [], + "message_id": message_id, + "chat_history": chat_history, + "agent_tool_history": [], + "ws_connection_id": ws_connection_id, + "debug_infos": {}, + "extra_response": {}, + "qq_match_results": [], + "last_tool_calls_results":None, + "all_knowledge_rag_tool":all_knowledge_rag_tool, + "tools":None, + # "agent_repeated_call_limit": chatbot_config["agent_repeated_call_limit"], + # "agent_current_call_number": 0, + "ddb_additional_kwargs": {}, + + } + ) + return response["app_response"] + + +main_chain_entry = common_entry diff --git a/source/lambda/online/langchain_integration/__init__.py b/source/lambda/online/langchain_integration/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/source/lambda/online/langchain_integration/chains/chat_chain.py b/source/lambda/online/langchain_integration/chains/chat_chain.py index 35bdb41c0..b51e342d4 100644 --- a/source/lambda/online/langchain_integration/chains/chat_chain.py +++ b/source/lambda/online/langchain_integration/chains/chat_chain.py @@ -98,6 +98,19 @@ class Mixtral8x7bChatChain(Claude2ChatChain): default_model_kwargs = {"max_tokens": 4096, "temperature": 0.01} +class Llama31Instruct70BChatChain(Claude2ChatChain): + model_id = LLMModelType.LLAMA3_1_70B_INSTRUCT + + +class MistraLlargeChat2407ChatChain(Claude2ChatChain): + model_id = LLMModelType.MISTRAL_LARGE_2407 + + +class CohereCommandRPlusChatChain(Claude2ChatChain): + model_id = LLMModelType.COHERE_COMMAND_R_PLUS + + + class Baichuan2Chat13B4BitsChatChain(LLMChain): model_id = LLMModelType.BAICHUAN2_13B_CHAT intent_type = LLMTaskType.CHAT diff --git a/source/lambda/online/langchain_integration/chains/conversation_summary_chain.py b/source/lambda/online/langchain_integration/chains/conversation_summary_chain.py index c7f0631f1..61b67598b 100644 --- 
a/source/lambda/online/langchain_integration/chains/conversation_summary_chain.py +++ b/source/lambda/online/langchain_integration/chains/conversation_summary_chain.py @@ -197,6 +197,26 @@ class Claude3HaikuConversationSummaryChain(Claude2ConversationSummaryChain): model_id = LLMModelType.CLAUDE_3_HAIKU +class Mixtral8x7bConversationSummaryChain(Claude2ConversationSummaryChain): + model_id = LLMModelType.MIXTRAL_8X7B_INSTRUCT + default_model_kwargs = {"max_tokens": 4096, "temperature": 0.01} + + +class Llama31Instruct70BConversationSummaryChain(Claude2ConversationSummaryChain): + model_id = LLMModelType.LLAMA3_1_70B_INSTRUCT + + +class MistraLlargeChat2407ConversationSummaryChain(Claude2ConversationSummaryChain): + model_id = LLMModelType.MISTRAL_LARGE_2407 + + +class CohereCommandRPlusConversationSummaryChain(Claude2ConversationSummaryChain): + model_id = LLMModelType.COHERE_COMMAND_R_PLUS + + + + + class Qwen2Instruct72BConversationSummaryChain(Claude2ConversationSummaryChain): model_id = LLMModelType.QWEN2INSTRUCT72B diff --git a/source/lambda/online/langchain_integration/chains/tool_calling_chain_api.py b/source/lambda/online/langchain_integration/chains/tool_calling_chain_api.py new file mode 100644 index 000000000..be918e369 --- /dev/null +++ b/source/lambda/online/langchain_integration/chains/tool_calling_chain_api.py @@ -0,0 +1,158 @@ +# tool calling chain +import json +from typing import List,Dict,Any +import re +from collections import defaultdict + +from langchain.schema.runnable import ( + RunnableLambda, + RunnablePassthrough +) +from common_logic.common_utils.prompt_utils import get_prompt_template +from common_logic.common_utils.logger_utils import print_llm_messages +from langchain_core.messages import( + AIMessage, + SystemMessage +) +from langchain.prompts import ChatPromptTemplate +from langchain_core.messages import AIMessage,SystemMessage +from langchain.tools.base import BaseTool +from langchain_core.language_models import BaseChatModel + 
+from common_logic.common_utils.constant import ( + LLMTaskType, + LLMModelType, + MessageType +) +from common_logic.common_utils.time_utils import get_china_now + +from . import LLMChain +from ..chat_models import Model + + +class Claude2ToolCallingChain(LLMChain): + model_id = LLMModelType.CLAUDE_2 + intent_type = LLMTaskType.TOOL_CALLING_API + default_model_kwargs = { + "max_tokens": 2000, + "temperature": 0.1, + "top_p": 0.9 + } + + @classmethod + def create_chat_history(cls,x): + chat_history = x['chat_history'] + \ + [{"role": MessageType.HUMAN_MESSAGE_TYPE,"content": x['query']}] + \ + x['agent_tool_history'] + return chat_history + + @classmethod + def get_common_system_prompt(cls,system_prompt_template:str): + now = get_china_now() + date_str = now.strftime("%Y年%m月%d日") + weekdays = ['星期一', '星期二', '星期三', '星期四', '星期五', '星期六', '星期日'] + weekday = weekdays[now.weekday()] + system_prompt = system_prompt_template.format(date=date_str,weekday=weekday) + return system_prompt + + + @classmethod + def bind_tools(cls,llm:BaseChatModel,tools:List[BaseTool], fewshot_examples=None, fewshot_template=None,tool_choice='any'): + tools = [tool.model_copy() for tool in tools] + if not fewshot_examples: + return llm.bind_tools(tools,tool_choice=tool_choice) + + # add fewshot examples to tool description + tools_map = {tool.name:tool for tool in tools} + + # group fewshot examples + fewshot_examples_grouped = defaultdict(list) + for example in fewshot_examples: + fewshot_examples_grouped[example['name']].append(example) + + for tool_name,examples in fewshot_examples_grouped.items(): + tool = tools_map[tool_name] + tool.description += "\n\nHere are some examples where this tool are called:\n" + examples_strs = [] + for example in examples: + params_str = json.dumps(example['kwargs'],ensure_ascii=False) + examples_strs.append( + fewshot_template.format( + query=example['query'], + args=params_str + ) + ) + + tool.description += "\n\n".join(examples_strs) + return 
llm.bind_tools(tools,tool_choice=tool_choice) + + + @classmethod + def create_chain(cls, model_kwargs=None, **kwargs): + model_kwargs = model_kwargs or {} + tools:list = kwargs['tools'] + assert all(isinstance(tool,BaseTool) for tool in tools),tools + fewshot_examples = kwargs.get('fewshot_examples',[]) + if fewshot_examples: + fewshot_examples.append({ + "name": "give_rhetorical_question", + "query": "今天天气怎么样?", + "kwargs": {"question": "请问你想了解哪个城市的天气?"} + }) + agent_system_prompt = get_prompt_template( + model_id=cls.model_id, + task_type=cls.intent_type, + prompt_name="agent_system_prompt" + ).prompt_template + + agent_system_prompt = kwargs.get("agent_system_prompt",None) or agent_system_prompt + + agent_system_prompt = cls.get_common_system_prompt( + agent_system_prompt + ) + + # tool fewshot prompt + tool_fewshot_prompt = get_prompt_template( + model_id=cls.model_id, + task_type=cls.intent_type, + prompt_name="tool_fewshot_prompt" + ).prompt_template + tool_fewshot_prompt = kwargs.get('tool_fewshot_prompt',None) or tool_fewshot_prompt + + model_kwargs = {**cls.default_model_kwargs, **model_kwargs} + + llm = Model.get_model( + model_id=cls.model_id, + model_kwargs=model_kwargs, + ) + llm = cls.bind_tools(llm,tools,fewshot_examples,fewshot_template=tool_fewshot_prompt) + chain = RunnablePassthrough.assign(chat_history=lambda x: cls.create_chat_history(x)) | llm + return chain + + +class Claude21ToolCallingChain(Claude2ToolCallingChain): + model_id = LLMModelType.CLAUDE_21 + + +class Claude3SonnetToolCallingChain(Claude2ToolCallingChain): + model_id = LLMModelType.CLAUDE_3_SONNET + + +class Claude3HaikuToolCallingChain(Claude2ToolCallingChain): + model_id = LLMModelType.CLAUDE_3_HAIKU + + +class Claude35SonnetToolCallingChain(Claude2ToolCallingChain): + model_id = LLMModelType.CLAUDE_3_5_SONNET + + +class Llama31Instruct70BToolCallingChain(Claude2ToolCallingChain): + model_id = LLMModelType.LLAMA3_1_70B_INSTRUCT + + +class 
MistraLlarge2407ToolCallingChain(Claude2ToolCallingChain): + model_id = LLMModelType.MISTRAL_LARGE_2407 + + +class CohereCommandRPlusToolCallingChain(Claude2ToolCallingChain): + model_id = LLMModelType.COHERE_COMMAND_R_PLUS diff --git a/source/lambda/online/langchain_integration/chains/tool_calling_chain_claude_api.py b/source/lambda/online/langchain_integration/chains/tool_calling_chain_claude_api.py deleted file mode 100644 index 55d88f958..000000000 --- a/source/lambda/online/langchain_integration/chains/tool_calling_chain_claude_api.py +++ /dev/null @@ -1,320 +0,0 @@ -# tool calling chain -import json -from typing import List,Dict,Any -import re - -from langchain.schema.runnable import ( - RunnableLambda, - RunnablePassthrough -) -from common_logic.common_utils.prompt_utils import get_prompt_template -from common_logic.common_utils.logger_utils import print_llm_messages -from langchain_core.messages import( - AIMessage, - SystemMessage -) -from langchain.prompts import ChatPromptTemplate -from langchain_core.messages import AIMessage,SystemMessage -from langchain.tools.base import BaseTool - -from common_logic.common_utils.constant import ( - LLMTaskType, - LLMModelType, - MessageType -) -from common_logic.common_utils.time_utils import get_china_now - -from . import LLMChain -from ..chat_models import Model - -# incorrect_tool_call_example = """Here is an example of an incorrectly formatted tool call, which you should avoid. -# -# -# -# tool_name -# -# -# question -# string -# value -# -# -# -# -# - -# In this incorrect tool calling example, the parameter `name` should form a XLM tag. 
-# """ - - -# SYSTEM_MESSAGE_PROMPT =(f"In this environment you have access to a set of tools you can use to answer the user's question.\n" -# "\n" -# "You may call them like this:\n" -# "\n" -# "\n" -# "$TOOL_NAME\n" -# "\n" -# "<$PARAMETER_NAME>$PARAMETER_VALUE\n" -# "...\n" -# "\n" -# "\n" -# "\n" -# "\n" -# "Here are the tools available:\n" -# "\n" -# "{tools}" -# "\n" -# "\nAnswer the user's request using relevant tools (if they are available). Before calling a tool, do some analysis within tags. First, think about which of the provided tools is the relevant tool to answer the user's request. Second, go through each of the required parameters of the relevant tool and determine if the user has directly provided or given enough information to infer a value. When deciding if the parameter can be inferred, carefully consider all the context to see if it supports a specific value. If all of the required parameters are present or can be reasonably inferred, close the thinking tag and proceed with the tool call. BUT, if one of the values for a required parameter is missing, DO NOT invoke the function (not even with fillers for the missing params) and instead, ask the user to provide the missing parameters. DO NOT ask for more information on optional parameters if it is not provided." -# "\nHere are some guidelines for you:\n{tool_call_guidelines}." -# f"\n{incorrect_tool_call_example}" -# ) - -# SYSTEM_MESSAGE_PROMPT_WITH_FEWSHOT_EXAMPLES = SYSTEM_MESSAGE_PROMPT + ( -# "Some examples of tool calls are given below, where the content within represents the most recent reply in the dialog." 
-# "\n{fewshot_examples}" -# ) - -# TOOL_FORMAT = """ -# {tool_name} -# {tool_description} -# -# {formatted_required_parameters} -# -# -# {formatted_optional_parameters} -# -# """ - -# TOOL_PARAMETER_FORMAT = """ -# {parameter_name} -# {parameter_type} -# {parameter_description} -# """ - -# TOOL_EXECUTE_SUCCESS_TEMPLATE = """ -# -# -# {tool_name} -# -# {result} -# -# -# -# """ - -# TOOL_EXECUTE_FAIL_TEMPLATE = """ -# -# -# {error} -# -# -# """ - -# AGENT_SYSTEM_PROMPT = "你是一个亚马逊云科技的AI助理,你的名字是亚麻小Q。今天是{date_str},{weekday}. " - - -# def _get_type(parameter: Dict[str, Any]) -> str: -# if "type" in parameter: -# return parameter["type"] -# if "anyOf" in parameter: -# return json.dumps({"anyOf": parameter["anyOf"]}) -# if "allOf" in parameter: -# return json.dumps({"allOf": parameter["allOf"]}) -# return json.dumps(parameter) - - -# def convert_openai_tool_to_anthropic(tools:list[dict])->str: -# formatted_tools = tools -# tools_data = [ -# { -# "tool_name": tool["name"], -# "tool_description": tool["description"], -# "formatted_required_parameters": "\n".join( -# [ -# TOOL_PARAMETER_FORMAT.format( -# parameter_name=name, -# parameter_type=_get_type(parameter), -# parameter_description=parameter.get("description"), -# ) for name, parameter in tool["parameters"]["properties"].items() -# if name in tool["parameters"].get("required", []) -# ] -# ), -# "formatted_optional_parameters": "\n".join( -# [ -# TOOL_PARAMETER_FORMAT.format( -# parameter_name=name, -# parameter_type=_get_type(parameter), -# parameter_description=parameter.get("description"), -# ) for name, parameter in tool["parameters"]["properties"].items() -# if name not in tool["parameters"].get("required", []) -# ] -# ), -# } -# for tool in formatted_tools -# ] -# tools_formatted = "\n".join( -# [ -# TOOL_FORMAT.format( -# tool_name=tool["tool_name"], -# tool_description=tool["tool_description"], -# formatted_required_parameters=tool["formatted_required_parameters"], -# 
formatted_optional_parameters=tool["formatted_optional_parameters"], -# ) -# for tool in tools_data -# ] -# ) -# return tools_formatted - - -class Claude2ToolCallingChain(LLMChain): - model_id = LLMModelType.CLAUDE_2 - intent_type = LLMTaskType.TOOL_CALLING_API - default_model_kwargs = { - "max_tokens": 2000, - "temperature": 0.1, - "top_p": 0.9 - } - - @staticmethod - def format_fewshot_examples(fewshot_examples:list[dict]): - fewshot_example_strs = [] - for fewshot_example in fewshot_examples: - param_strs = [] - for p,v in fewshot_example['kwargs'].items(): - param_strs.append(f"<{p}>{v}\n" - f"{fewshot_example['query']}\n" - f"\n" - "\n" - "\n" - f"{fewshot_example['name']}\n" - "\n" - f"{param_str}" - "\n" - "\n" - "\n" - "\n" - "" - ) - fewshot_example_strs.append(fewshot_example_str) - fewshot_example_str = '\n'.join(fewshot_example_strs) - return f"\n{fewshot_example_str}\n" - - @classmethod - def parse_function_calls_from_ai_message(cls,message:AIMessage): - content = "" + message.content + "" - function_calls:List[str] = re.findall("(.*?)", content,re.S) - if not function_calls: - content = "" + message.content - - return { - "function_calls": function_calls, - "content": content - } - - @classmethod - def create_chat_history(cls,x): - chat_history = x['chat_history'] + \ - [{"role": MessageType.HUMAN_MESSAGE_TYPE,"content": x['query']}] + \ - x['agent_tool_history'] - return chat_history - - @classmethod - def get_common_system_prompt(cls,system_prompt_template:str): - now = get_china_now() - date_str = now.strftime("%Y年%m月%d日") - weekdays = ['星期一', '星期二', '星期三', '星期四', '星期五', '星期六', '星期日'] - weekday = weekdays[now.weekday()] - system_prompt = system_prompt_template.format(date=date_str,weekday=weekday) - return system_prompt - - - @classmethod - def create_chain(cls, model_kwargs=None, **kwargs): - model_kwargs = model_kwargs or {} - tools:list = kwargs['tools'] - assert all(isinstance(tool,BaseTool) for tool in tools),tools - fewshot_examples = 
kwargs.get('fewshot_examples',[]) - if fewshot_examples: - fewshot_examples.append({ - "name": "give_rhetorical_question", - "query": "今天天气怎么样?", - "kwargs": {"question": "请问你想了解哪个城市的天气?"} - }) - user_system_prompt = get_prompt_template( - model_id=cls.model_id, - task_type=cls.intent_type, - prompt_name="user_prompt" - ).prompt_template - - user_system_prompt = kwargs.get("user_prompt",None) or user_system_prompt - - user_system_prompt = cls.get_common_system_prompt( - user_system_prompt - ) - guidelines_prompt = get_prompt_template( - model_id=cls.model_id, - task_type=cls.intent_type, - prompt_name="guidelines_prompt" - ).prompt_template - - guidelines_prompt = kwargs.get("guidelines_prompt",None) or guidelines_prompt - model_kwargs = {**cls.default_model_kwargs, **model_kwargs} - - # tools_formatted = convert_openai_tool_to_anthropic(tools) - - if fewshot_examples: - system_prompt = SYSTEM_MESSAGE_PROMPT_WITH_FEWSHOT_EXAMPLES.format( - tools=tools_formatted, - fewshot_examples=cls.format_fewshot_examples(fewshot_examples), - tool_call_guidelines=guidelines_prompt - ) - else: - system_prompt = SYSTEM_MESSAGE_PROMPT.format( - tools=tools_formatted, - tool_call_guidelines=guidelines_prompt - ) - - system_prompt = user_system_prompt + system_prompt - tool_calling_template = ChatPromptTemplate.from_messages( - [ - SystemMessage(content=system_prompt), - ("placeholder", "{chat_history}"), - AIMessage(content="") - ]) - - llm = Model.get_model( - model_id=cls.model_id, - model_kwargs=model_kwargs, - ) - chain = RunnablePassthrough.assign(chat_history=lambda x: cls.create_chat_history(x)) | tool_calling_template \ - | RunnableLambda(lambda x: print_llm_messages(f"Agent messages: {x.messages}") or x.messages ) \ - | llm | RunnableLambda(lambda message:cls.parse_function_calls_from_ai_message( - message - )) - return chain - - -class Claude21ToolCallingChain(Claude2ToolCallingChain): - model_id = LLMModelType.CLAUDE_21 - - -class 
ClaudeInstanceToolCallingChain(Claude2ToolCallingChain): - model_id = LLMModelType.CLAUDE_INSTANCE - - -class Claude3SonnetToolCallingChain(Claude2ToolCallingChain): - model_id = LLMModelType.CLAUDE_3_SONNET - - -class Claude3HaikuToolCallingChain(Claude2ToolCallingChain): - model_id = LLMModelType.CLAUDE_3_HAIKU - - -class Claude35SonnetToolCallingChain(Claude2ToolCallingChain): - model_id = "anthropic.claude-3-5-sonnet-20240620-v1:0" diff --git a/source/lambda/online/langchain_integration/tools/__init__.py b/source/lambda/online/langchain_integration/tools/__init__.py index 91e937b24..0e81f84b1 100644 --- a/source/lambda/online/langchain_integration/tools/__init__.py +++ b/source/lambda/online/langchain_integration/tools/__init__.py @@ -39,10 +39,18 @@ from datamodel_code_generator import DataModelType, PythonVersion from datamodel_code_generator.model import get_data_model_types from datamodel_code_generator.parser.jsonschema import JsonSchemaParser -from langchain.tools.base import StructuredTool,BaseTool - +from langchain.tools.base import StructuredTool as _StructuredTool ,BaseTool +from langchain_core.pydantic_v1 import create_model,BaseModel from common_logic.common_utils.constant import SceneType from common_logic.common_utils.lambda_invoke_utils import invoke_with_lambda +from functools import partial + + + +class StructuredTool(_StructuredTool): + pass_state:bool = False # if pass state into tool invoke + pass_state_name:str = "state" # pass state name + class ToolIdentifier(BaseModel): @@ -104,6 +112,7 @@ def register_lc_tool( ) assert isinstance(tool,BaseTool),(tool,type(tool)) cls.tool_map[tool_identifier.tool_id] = tool + return tool @classmethod @@ -131,7 +140,7 @@ def register_func_as_tool( return_direct=return_direct ) # register tool - ToolManager.register_lc_tool( + return ToolManager.register_lc_tool( tool_identifier=tool_identifier, tool=tool ) @@ -165,11 +174,55 @@ def _func(**kargs): ), return_direct=return_direct ) - 
ToolManager.register_lc_tool( + return ToolManager.register_lc_tool( tool_identifier=tool_identifier, tool=tool ) + @classmethod + def register_common_rag_tool( + cls, + retriever_config:dict, + description:str, + scene=None, + name=None, + tool_identifier=None, + return_direct=False, + pass_state=True, + pass_state_name='state' + ): + assert scene == SceneType.COMMON, scene + from .common_tools.rag import rag_tool + + tool_identifier = cls.get_tool_identifier( + scene=scene, + name=name, + tool_identifier=tool_identifier + ) + + class RagModel(BaseModel): + class Config: + schema_extra = {"description": description} + + tool = StructuredTool.from_function( + func=partial(rag_tool, + retriever_config=retriever_config + ), + name=tool_identifier.name, + args_schema=ToolManager.convert_tool_def_to_pydantic( + tool_id=tool_identifier.tool_id, + tool_def=RagModel + ), + description=description, + return_direct=return_direct, + pass_state=pass_state, + pass_state_name=pass_state_name + ) + + return ToolManager.register_lc_tool( + tool_identifier=tool_identifier, + tool=tool + ) @classmethod @@ -306,7 +359,7 @@ def _load_common_rag_tool(tool_identifier:ToolIdentifier): ToolManager.register_func_as_tool( tool_identifier.scene, tool_identifier.name, - rag.rag, + rag.rag_tool, tool_def, return_direct=True ) diff --git a/source/lambda/online/langchain_integration/tools/common_tools/__init__.py b/source/lambda/online/langchain_integration/tools/common_tools/__init__.py deleted file mode 100644 index c57069898..000000000 --- a/source/lambda/online/langchain_integration/tools/common_tools/__init__.py +++ /dev/null @@ -1,121 +0,0 @@ -from common_logic.common_utils.constant import SceneType, ToolRuningMode -from .._tool_base import tool_manager -from . 
import ( - get_weather, - give_rhetorical_question, - give_final_response, - chat, - rag -) - - -SCENE = SceneType.COMMON -LAMBDA_NAME = "lambda_common_tools" - -tool_manager.register_tool({ - "name": "get_weather", - "scene": SCENE, - "lambda_name": LAMBDA_NAME, - "lambda_module_path": get_weather.lambda_handler, - "tool_def": { - "name": "get_weather", - "description": "Get the current weather for `city_name`", - "parameters": { - "type": "object", - "properties": { - "city_name": { - "description": "The name of the city to be queried", - "type": "string" - }, - }, - "required": ["city_name"] - } - }, - "running_mode": ToolRuningMode.LOOP -}) - - -tool_manager.register_tool( - { - "name": "give_rhetorical_question", - "scene": SCENE, - "lambda_name": LAMBDA_NAME, - "lambda_module_path": give_rhetorical_question.lambda_handler, - "tool_def": { - "name": "give_rhetorical_question", - "description": "If the user's question is not clear and specific, resulting in the inability to call other tools, please call this tool to ask the user a rhetorical question", - "parameters": { - "type": "object", - "properties": { - "question": { - "description": "The rhetorical question to user", - "type": "string" - }, - }, - "required": ["question"], - }, - }, - "running_mode": ToolRuningMode.ONCE - } -) - - -tool_manager.register_tool( - { - "name": "give_final_response", - "scene": SCENE, - "lambda_name": LAMBDA_NAME, - "lambda_module_path": give_final_response.lambda_handler, - "tool_def": { - "name": "give_final_response", - "description": "If none of the other tools need to be called, call the current tool to complete the direct response to the user.", - "parameters": { - "type": "object", - "properties": { - "response": { - "description": "Response to user", - "type": "string" - } - }, - "required": ["response"] - }, - }, - "running_mode": ToolRuningMode.ONCE - } -) - - -tool_manager.register_tool({ - "name": "chat", - "scene": SCENE, - "lambda_name": LAMBDA_NAME, - 
"lambda_module_path": chat.lambda_handler, - "tool_def": { - "name": "chat", - "description": "casual talk with AI", - "parameters": { - "type": "object", - "properties": { - "response": { - "description": "response to users", - "type": "string" - }}, - "required": ["response"] - }, - }, - "running_mode": ToolRuningMode.ONCE -}) - - -tool_manager.register_tool({ - "name": "rag_tool", - "scene": SCENE, - "lambda_name": LAMBDA_NAME, - "lambda_module_path": rag.lambda_handler, - "tool_def": { - "name": "rag_tool", - "description": "private knowledge", - "parameters": {} - }, - "running_mode": ToolRuningMode.ONCE -}) diff --git a/source/lambda/online/langchain_integration/tools/common_tools/rag.py b/source/lambda/online/langchain_integration/tools/common_tools/rag.py index 2537bb5ca..8c0b6d736 100644 --- a/source/lambda/online/langchain_integration/tools/common_tools/rag.py +++ b/source/lambda/online/langchain_integration/tools/common_tools/rag.py @@ -4,16 +4,19 @@ LLMTaskType ) from common_logic.common_utils.lambda_invoke_utils import send_trace +from langchain_integration.langgraph_integration import get_current_app - -def rag(query,state): +def rag_tool(retriever_config:dict,state): + # state = event_body['state'] context_list = [] # add qq match results context_list.extend(state['qq_match_results']) figure_list = [] - retriever_params = state["chatbot_config"]["private_knowledge_config"] - retriever_params["query"] = state[retriever_params.get("retriever_config",{}).get("query_key","query")] + retriever_params = retriever_config + # retriever_params = state["chatbot_config"]["private_knowledge_config"] + retriever_params["query"] = state[retriever_config.get("query_key","query")] + # retriever_params["query"] = query output: str = invoke_lambda( event_body=retriever_params, lambda_name="Online_Functions", From 698f29be3d1036cf251f47f9434afb3b1711f004 Mon Sep 17 00:00:00 2001 From: Cui <530051970@qq.com> Date: Tue, 29 Oct 2024 23:15:16 +0800 Subject: [PATCH 046/110] 
update viperlightignore --- .viperlightignore | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.viperlightignore b/.viperlightignore index 2036fc010..d92077aaa 100644 --- a/.viperlightignore +++ b/.viperlightignore @@ -3,7 +3,9 @@ tests/data/ tests/buildspec_build.sh api_test/buildspec.yaml -api_test/buildspec-20241012.yaml +api_test/buildspec-third.yaml +api_test/config-third.json +api_test/config.json api_test/biz_logic/generated-client-rest-api/setup.py api_test/biz_logic/generated-client-rest-api/pyproject.toml api_test/test_case/* From 6ab050463cc03a06352b5b4fe3589082051a5c0a Mon Sep 17 00:00:00 2001 From: Cui <530051970@qq.com> Date: Wed, 30 Oct 2024 11:21:09 +0800 Subject: [PATCH 047/110] update third buildspec.yaml --- api_test/buildspec-third.yaml | 2 +- api_test/gen-report-lambda.py | 10 ++--- api_test/test_case/test_01_rest_document.py | 2 +- api_test/test_case/test_02_ws_chat.py | 2 +- api_test/test_case/test_03_rest_session.py | 2 +- api_test/test_case/test_04_rest_prompt.py | 2 +- api_test/test_case/test_05_rest_intention.py | 47 ++++++++++++++++++++ 7 files changed, 57 insertions(+), 10 deletions(-) create mode 100644 api_test/test_case/test_05_rest_intention.py diff --git a/api_test/buildspec-third.yaml b/api_test/buildspec-third.yaml index 6b00583b0..c691412b8 100644 --- a/api_test/buildspec-third.yaml +++ b/api_test/buildspec-third.yaml @@ -181,7 +181,7 @@ phases: cat .env source agentVenv/bin/activate # pytest test_case --continue-on-collection-errors --log-cli-level=INFO - pytest test_case --continue-on-collection-errors --log-cli-level=INFO --json-report --json-report-file=detail_third.json --html=report_third.html --self-contained-html > detail_third.log + pytest test_case --ignore=test_case/test_01_rest_document.py --continue-on-collection-errors --log-cli-level=INFO --json-report --json-report-file=detail_third.json --html=report_third.html --self-contained-html > detail_third.log popd test_complete_time=$(date 
+"%Y-%m-%d_%H-%M-%S") echo "----------------------------------------------------------------" diff --git a/api_test/gen-report-lambda.py b/api_test/gen-report-lambda.py index ce1987be2..42cbda42d 100644 --- a/api_test/gen-report-lambda.py +++ b/api_test/gen-report-lambda.py @@ -6,7 +6,7 @@ def __gen_completed_message(bucket: str, date: str, payload_type: int): log_key=f"{date}_detail_third.log" if payload_type == 0 else f"{date}_detail.log" response = __s3_client.get_object(Bucket=bucket, Key=detail_key) log_response = __s3_client.get_object(Bucket=bucket, Key=log_key) - message = '【BuiltIn KB】:\n' if payload_type == 1 else '【Third KB】:\n' + message = 'BuiltIn KB:\n' if payload_type == 1 else 'Third KB:\n' content = log_response['Body'].read().decode('utf-8') target_substring = "=================================== FAILURES ===================================" end_target_substring = "=============================== warnings summary ===============================" @@ -57,7 +57,7 @@ def __gen_completed_message(bucket: str, date: str, payload_type: int): def __gen_uncompleted_message(payload, payload_type): - message = '【BuiltIn KB】:\n' if payload_type == 1 else '【Third KB】:\n' + message = 'BuiltIn KB:\n' if payload_type == 1 else 'Third KB:\n' message+= "The stack deploy FAILED! 
The reason for the failure is as follows:" message+="\n\n" message+=payload['detail'] @@ -98,7 +98,7 @@ def lambda_handler(event, context): passed, failed, error, msg = __gen_completed_message(bucket, date, 1) total=passed + failed + error if total != 0: - coverage = passed/total + coverage = round(passed/total, 2)*100 else: msg = __gen_uncompleted_message(payload, 1) @@ -106,11 +106,11 @@ def lambda_handler(event, context): third_passed, third_failed, third_error, third_msg = __gen_completed_message(bucket, date, 0) third_total = third_passed + third_failed + third_error if third_total != 0: - third_coverage = third_passed/third_total + third_coverage = round(third_passed/third_total, 2)*100 else: third_msg = __gen_uncompleted_message(payload, 0) - message = f"Hi, team!\nThe following is API autotest report for {date}.\n\n ============================ summary =============================\n REPOSITORY: {payload['repository']}\n BRANCH: {payload['branch']}\n TEST RESULT: {status}\n Built-In KB Total:{passed + failed + error} Passed:{passed} Failed:{failed} Error:{error}\n Coverage:{coverage}\n Third KB Total:{third_passed + third_failed + third_error} Passed:{third_passed} Failed:{third_failed} Error:{third_error}\n Coverage:{third_coverage}\n\n\n " + message = f"Hi, team!\nThe following is API autotest report for {date}.\n\n ============================ summary =============================\n REPOSITORY: {payload['repository']}\n BRANCH: {payload['branch']}\n TEST RESULT: {status}\n Built-In KB:\n Total:{passed + failed + error}\n Passed:{passed} Failed:{failed} Error:{error}\n Coverage:{coverage}%\n Third KB:\n Total:{third_passed + third_failed + third_error}\n Passed:{third_passed} Failed:{third_failed} Error:{third_error}\n Coverage:{third_coverage}%\n\n\n " message += msg message += third_msg message+=f"\n\n More details click:\n Built-in KB: {payload['build_url']}\n Third KB: {third_payload['build_url']}" diff --git 
a/api_test/test_case/test_01_rest_document.py b/api_test/test_case/test_01_rest_document.py index 092ba5683..4c8e63091 100644 --- a/api_test/test_case/test_01_rest_document.py +++ b/api_test/test_case/test_01_rest_document.py @@ -17,7 +17,7 @@ partition = caller_identity['Arn'].split(':')[1] class TestDocument: - """DataSourceDiscovery test stubs""" + """Document test stubs""" upload_success_msg = 'The S3 presigned url is generated' upload_prefix_data = 'https://intelli-agent-apiconstructllmbotdocument' diff --git a/api_test/test_case/test_02_ws_chat.py b/api_test/test_case/test_02_ws_chat.py index 4554804b6..ba84882f2 100644 --- a/api_test/test_case/test_02_ws_chat.py +++ b/api_test/test_case/test_02_ws_chat.py @@ -11,7 +11,7 @@ logger = logging.getLogger(__name__) class TestChat: - """DataSourceDiscovery test stubs""" + """Chat test stubs""" @classmethod def setup_class(cls): diff --git a/api_test/test_case/test_03_rest_session.py b/api_test/test_case/test_03_rest_session.py index c68f1abb9..7878f1544 100644 --- a/api_test/test_case/test_03_rest_session.py +++ b/api_test/test_case/test_03_rest_session.py @@ -20,7 +20,7 @@ partition = caller_identity['Arn'].split(':')[1] class TestSession: - """DataSourceDiscovery test stubs""" + """Session test stubs""" @classmethod def setup_class(cls): diff --git a/api_test/test_case/test_04_rest_prompt.py b/api_test/test_case/test_04_rest_prompt.py index 1a2b11b4e..c45c02d2b 100644 --- a/api_test/test_case/test_04_rest_prompt.py +++ b/api_test/test_case/test_04_rest_prompt.py @@ -20,7 +20,7 @@ partition = caller_identity['Arn'].split(':')[1] class TestPrompt: - """DataSourceDiscovery test stubs""" + """Prompt test stubs""" @classmethod def setup_class(cls): diff --git a/api_test/test_case/test_05_rest_intention.py b/api_test/test_case/test_05_rest_intention.py new file mode 100644 index 000000000..e6fea379f --- /dev/null +++ b/api_test/test_case/test_05_rest_intention.py @@ -0,0 +1,47 @@ +import datetime +import os +import 
time +# import api_test.config as config +from dotenv import load_dotenv +import pytest +import requests + +from api_test.biz_logic.rest_api import openapi_client +# from api_test.biz_logic.rest_api import IntellapiconnnHdtwRWUXa + +from .utils import step +import logging +import boto3 + +logger = logging.getLogger(__name__) +sts = boto3.client('sts') +s3_client = boto3.client('s3') +caller_identity = boto3.client('sts').get_caller_identity() +partition = caller_identity['Arn'].split(':')[1] + +class TestIntention: + """Intention test stubs""" + + @classmethod + def setup_class(cls): + '''test case''' + step( + f"[{datetime.datetime.strftime(datetime.datetime.now(),'%Y-%m-%d')}] [{__name__}] Test start..." + ) + load_dotenv() + cls.configuration = openapi_client.Configuration(host=os.getenv('api_url')) + cls.api_client = openapi_client.ApiClient(cls.configuration) + cls.api_client.set_default_header("Authorization", f'Bearer {os.getenv("token")}') + cls.api_instance = openapi_client.DefaultApi(cls.api_client) + globals()["exe_ids"] = None + + @classmethod + def teardown_class(cls): + '''test case''' + step( + f"[{datetime.datetime.strftime(datetime.datetime.now(),'%Y-%m-%d')}] [{__name__}] Test end." 
+ ) + + def test_39_list_prompt(self): + # TBD + pass \ No newline at end of file From 541c3e7eb130e83b41f10b9de051ae8da17ad288 Mon Sep 17 00:00:00 2001 From: zhouxss Date: Thu, 31 Oct 2024 06:58:00 +0000 Subject: [PATCH 048/110] fix: one rag tool should respect to one index; refactor: add registerd rag tools to agent config --- .../online_entries/common_entry_v2.py | 32 ++++++++++++++++--- 1 file changed, 27 insertions(+), 5 deletions(-) diff --git a/source/lambda/online/lambda_main/main_utils/online_entries/common_entry_v2.py b/source/lambda/online/lambda_main/main_utils/online_entries/common_entry_v2.py index e05aedcb1..5e4814de9 100644 --- a/source/lambda/online/lambda_main/main_utils/online_entries/common_entry_v2.py +++ b/source/lambda/online/lambda_main/main_utils/online_entries/common_entry_v2.py @@ -583,19 +583,36 @@ def register_rag_tool_from_config(event_body: dict): chatbot_manager = ChatbotManager.from_environ() chatbot = chatbot_manager.get_chatbot(group_name, chatbot_id) logger.info(chatbot) + registered_tool_names = [] for index_type, item_dict in chatbot.index_ids.items(): if index_type != IndexType.INTENTION: for index_content in item_dict["value"].values(): if "indexId" in index_content and "description" in index_content: + # Find retriever contain index_id + retrievers = event_body["chatbot_config"]["private_knowledge_config"]['retrievers'] + retriever = None + for retriever in retrievers: + if retriever["index_name"] == index_content["indexId"]: + break + assert retriever is not None,retrievers + reranks = event_body["chatbot_config"]["private_knowledge_config"]['reranks'] + index_name = index_content["indexId"] # TODO give specific retriever config ToolManager.register_common_rag_tool( - retriever_config=event_body["chatbot_config"]["private_knowledge_config"], - name=index_content["indexId"], + retriever_config={ + "retrievers":[retriever], + "reranks":[reranks[0]], + "llm_config": 
event_body["chatbot_config"]["private_knowledge_config"]['llm_config'] + }, + # event_body["chatbot_config"]["private_knowledge_config"], + name=index_name, scene=SceneType.COMMON, description=index_content["description"], pass_state=True, pass_state_name='state' ) + registered_tool_names.append(index_name) + return registered_tool_names def common_entry(event_body): @@ -604,7 +621,7 @@ def common_entry(event_body): :param event_body: The event body for lambda function. return: answer(str) """ - global app, app_agent + global app if app is None: app = build_graph(ChatbotState) @@ -633,10 +650,15 @@ def common_entry(event_body): message_id = event_body["custom_message_id"] ws_connection_id = event_body["ws_connection_id"] enable_trace = chatbot_config["enable_trace"] + agent_config = event_body["chatbot_config"]["agent_config"] # register as rag tool for each aos index - register_rag_tool_from_config(event_body) - + registered_tool_names = register_rag_tool_from_config(event_body) + # update private knowledge tool to agent config + for registered_tool_name in registered_tool_names: + if registered_tool_name not in agent_config['tools']: + agent_config['tools'].append(registered_tool_name) + # define all knowledge rag tool all_knowledge_rag_tool = ToolManager.register_common_rag_tool( retriever_config=event_body["chatbot_config"]["private_knowledge_config"], From ffb1f43cecffe1af079e2deee16c1809377a3884 Mon Sep 17 00:00:00 2001 From: zhouxss Date: Sat, 2 Nov 2024 02:49:49 +0000 Subject: [PATCH 049/110] move langchain_integration into common_logic --- .../langchain_integration/__init__.py | 0 .../langchain_integration/chains/__init__.py | 13 ++ .../chains/__llm_chain_base.py | 26 ++++ .../chains/chat_chain.py | 0 .../chains/conversation_summary_chain.py | 0 .../chains/hyde_chain.py | 0 .../chains/intention_chain.py | 0 .../chains/marketing_chains/__init__.py | 0 .../mkt_conversation_summary.py | 0 .../chains/marketing_chains/mkt_rag_chain.py | 0 
.../chains/query_rewrite_chain.py | 0 .../langchain_integration/chains/rag_chain.py | 1 - .../chains/retail_chains/__init__.py | 0 .../retail_chains/auto_evaluation_chain.py | 0 .../retail_conversation_summary_chain.py | 0 .../retail_tool_calling_chain_claude_xml.py | 0 .../retail_tool_calling_chain_json.py | 0 .../chains/stepback_chain.py | 0 .../chains/tool_calling_chain_api.py | 12 +- .../chains/tool_calling_chain_claude_xml.py | 0 .../chains/translate_chain.py | 0 .../chat_models/__init__.py | 0 .../chat_models/bedrock_models.py | 1 + .../chat_models/openai_models.py | 0 .../langgraph_integration.py | 12 ++ .../langchain_integration/tools/__init__.py | 55 ++++---- .../tools/common_tools/__init__retire.py | 121 ++++++++++++++++++ .../tools/common_tools/chat.py | 0 .../tools/common_tools/comparison_rag.py | 0 .../tools/common_tools/get_weather.py | 0 .../tools/common_tools/give_final_response.py | 0 .../common_tools/give_rhetorical_question.py | 0 .../tools/common_tools/rag.py | 2 +- .../tools/common_tools/step_back_rag.py | 0 34 files changed, 214 insertions(+), 29 deletions(-) rename source/lambda/online/{ => common_logic}/langchain_integration/__init__.py (100%) rename source/lambda/online/{ => common_logic}/langchain_integration/chains/__init__.py (92%) create mode 100644 source/lambda/online/common_logic/langchain_integration/chains/__llm_chain_base.py rename source/lambda/online/{ => common_logic}/langchain_integration/chains/chat_chain.py (100%) rename source/lambda/online/{ => common_logic}/langchain_integration/chains/conversation_summary_chain.py (100%) rename source/lambda/online/{ => common_logic}/langchain_integration/chains/hyde_chain.py (100%) rename source/lambda/online/{ => common_logic}/langchain_integration/chains/intention_chain.py (100%) rename source/lambda/online/{ => common_logic}/langchain_integration/chains/marketing_chains/__init__.py (100%) rename source/lambda/online/{ => 
common_logic}/langchain_integration/chains/marketing_chains/mkt_conversation_summary.py (100%) rename source/lambda/online/{ => common_logic}/langchain_integration/chains/marketing_chains/mkt_rag_chain.py (100%) rename source/lambda/online/{ => common_logic}/langchain_integration/chains/query_rewrite_chain.py (100%) rename source/lambda/online/{ => common_logic}/langchain_integration/chains/rag_chain.py (99%) rename source/lambda/online/{ => common_logic}/langchain_integration/chains/retail_chains/__init__.py (100%) rename source/lambda/online/{ => common_logic}/langchain_integration/chains/retail_chains/auto_evaluation_chain.py (100%) rename source/lambda/online/{ => common_logic}/langchain_integration/chains/retail_chains/retail_conversation_summary_chain.py (100%) rename source/lambda/online/{ => common_logic}/langchain_integration/chains/retail_chains/retail_tool_calling_chain_claude_xml.py (100%) rename source/lambda/online/{ => common_logic}/langchain_integration/chains/retail_chains/retail_tool_calling_chain_json.py (100%) rename source/lambda/online/{ => common_logic}/langchain_integration/chains/stepback_chain.py (100%) rename source/lambda/online/{ => common_logic}/langchain_integration/chains/tool_calling_chain_api.py (93%) rename source/lambda/online/{ => common_logic}/langchain_integration/chains/tool_calling_chain_claude_xml.py (100%) rename source/lambda/online/{ => common_logic}/langchain_integration/chains/translate_chain.py (100%) rename source/lambda/online/{ => common_logic}/langchain_integration/chat_models/__init__.py (100%) rename source/lambda/online/{ => common_logic}/langchain_integration/chat_models/bedrock_models.py (99%) rename source/lambda/online/{ => common_logic}/langchain_integration/chat_models/openai_models.py (100%) create mode 100644 source/lambda/online/common_logic/langchain_integration/langgraph_integration.py rename source/lambda/online/{ => common_logic}/langchain_integration/tools/__init__.py (90%) create mode 100644 
source/lambda/online/common_logic/langchain_integration/tools/common_tools/__init__retire.py rename source/lambda/online/{ => common_logic}/langchain_integration/tools/common_tools/chat.py (100%) rename source/lambda/online/{ => common_logic}/langchain_integration/tools/common_tools/comparison_rag.py (100%) rename source/lambda/online/{ => common_logic}/langchain_integration/tools/common_tools/get_weather.py (100%) rename source/lambda/online/{ => common_logic}/langchain_integration/tools/common_tools/give_final_response.py (100%) rename source/lambda/online/{ => common_logic}/langchain_integration/tools/common_tools/give_rhetorical_question.py (100%) rename source/lambda/online/{ => common_logic}/langchain_integration/tools/common_tools/rag.py (96%) rename source/lambda/online/{ => common_logic}/langchain_integration/tools/common_tools/step_back_rag.py (100%) diff --git a/source/lambda/online/langchain_integration/__init__.py b/source/lambda/online/common_logic/langchain_integration/__init__.py similarity index 100% rename from source/lambda/online/langchain_integration/__init__.py rename to source/lambda/online/common_logic/langchain_integration/__init__.py diff --git a/source/lambda/online/langchain_integration/chains/__init__.py b/source/lambda/online/common_logic/langchain_integration/chains/__init__.py similarity index 92% rename from source/lambda/online/langchain_integration/chains/__init__.py rename to source/lambda/online/common_logic/langchain_integration/chains/__init__.py index 0453a3ef5..9ba61fa11 100644 --- a/source/lambda/online/langchain_integration/chains/__init__.py +++ b/source/lambda/online/common_logic/langchain_integration/chains/__init__.py @@ -161,6 +161,18 @@ def _import_retail_tool_calling_chain_claude_xml(): Claude3HaikuRetailToolCallingChain ) +def _import_tool_calling_chain_api(): + from .tool_calling_chain_api import ( + Claude21ToolCallingChain, + Claude2ToolCallingChain, + Claude35SonnetToolCallingChain, + 
Claude3HaikuToolCallingChain, + Claude3SonnetToolCallingChain, + Llama31Instruct70BToolCallingChain, + CohereCommandRPlusToolCallingChain, + MistraLlarge2407ToolCallingChain + ) + def _import_auto_evaluation_chain(): from .retail_chains.auto_evaluation_chain import ( @@ -188,6 +200,7 @@ def _load_module(intent_type): LLMTaskType.HYDE_TYPE: _import_hyde_chain, LLMTaskType.QUERY_REWRITE_TYPE: _import_query_rewrite_chain, LLMTaskType.TOOL_CALLING_XML: _import_tool_calling_chain_claude_xml, + LLMTaskType.TOOL_CALLING_API:_import_tool_calling_chain_api, LLMTaskType.RETAIL_CONVERSATION_SUMMARY_TYPE: _import_retail_conversation_summary_chain, LLMTaskType.RETAIL_TOOL_CALLING: _import_retail_tool_calling_chain_claude_xml, LLMTaskType.AUTO_EVALUATION: _import_auto_evaluation_chain diff --git a/source/lambda/online/common_logic/langchain_integration/chains/__llm_chain_base.py b/source/lambda/online/common_logic/langchain_integration/chains/__llm_chain_base.py new file mode 100644 index 000000000..98ae93d34 --- /dev/null +++ b/source/lambda/online/common_logic/langchain_integration/chains/__llm_chain_base.py @@ -0,0 +1,26 @@ +class LLMChainMeta(type): + def __new__(cls, name, bases, attrs): + new_cls = type.__new__(cls, name, bases, attrs) + if name == "LLMChain": + return new_cls + new_cls.model_map[new_cls.get_chain_id()] = new_cls + return new_cls + + +class LLMChain(metaclass=LLMChainMeta): + model_map = {} + + @classmethod + def get_chain_id(cls): + return cls._get_chain_id(cls.model_id, cls.intent_type) + + @staticmethod + def _get_chain_id(model_id, intent_type): + return f"{model_id}__{intent_type}" + + @classmethod + def get_chain(cls, model_id, intent_type, model_kwargs=None, **kwargs): + return cls.model_map[cls._get_chain_id(model_id, intent_type)].create_chain( + model_kwargs=model_kwargs, **kwargs + ) + diff --git a/source/lambda/online/langchain_integration/chains/chat_chain.py b/source/lambda/online/common_logic/langchain_integration/chains/chat_chain.py 
similarity index 100% rename from source/lambda/online/langchain_integration/chains/chat_chain.py rename to source/lambda/online/common_logic/langchain_integration/chains/chat_chain.py diff --git a/source/lambda/online/langchain_integration/chains/conversation_summary_chain.py b/source/lambda/online/common_logic/langchain_integration/chains/conversation_summary_chain.py similarity index 100% rename from source/lambda/online/langchain_integration/chains/conversation_summary_chain.py rename to source/lambda/online/common_logic/langchain_integration/chains/conversation_summary_chain.py diff --git a/source/lambda/online/langchain_integration/chains/hyde_chain.py b/source/lambda/online/common_logic/langchain_integration/chains/hyde_chain.py similarity index 100% rename from source/lambda/online/langchain_integration/chains/hyde_chain.py rename to source/lambda/online/common_logic/langchain_integration/chains/hyde_chain.py diff --git a/source/lambda/online/langchain_integration/chains/intention_chain.py b/source/lambda/online/common_logic/langchain_integration/chains/intention_chain.py similarity index 100% rename from source/lambda/online/langchain_integration/chains/intention_chain.py rename to source/lambda/online/common_logic/langchain_integration/chains/intention_chain.py diff --git a/source/lambda/online/langchain_integration/chains/marketing_chains/__init__.py b/source/lambda/online/common_logic/langchain_integration/chains/marketing_chains/__init__.py similarity index 100% rename from source/lambda/online/langchain_integration/chains/marketing_chains/__init__.py rename to source/lambda/online/common_logic/langchain_integration/chains/marketing_chains/__init__.py diff --git a/source/lambda/online/langchain_integration/chains/marketing_chains/mkt_conversation_summary.py b/source/lambda/online/common_logic/langchain_integration/chains/marketing_chains/mkt_conversation_summary.py similarity index 100% rename from 
source/lambda/online/langchain_integration/chains/marketing_chains/mkt_conversation_summary.py rename to source/lambda/online/common_logic/langchain_integration/chains/marketing_chains/mkt_conversation_summary.py diff --git a/source/lambda/online/langchain_integration/chains/marketing_chains/mkt_rag_chain.py b/source/lambda/online/common_logic/langchain_integration/chains/marketing_chains/mkt_rag_chain.py similarity index 100% rename from source/lambda/online/langchain_integration/chains/marketing_chains/mkt_rag_chain.py rename to source/lambda/online/common_logic/langchain_integration/chains/marketing_chains/mkt_rag_chain.py diff --git a/source/lambda/online/langchain_integration/chains/query_rewrite_chain.py b/source/lambda/online/common_logic/langchain_integration/chains/query_rewrite_chain.py similarity index 100% rename from source/lambda/online/langchain_integration/chains/query_rewrite_chain.py rename to source/lambda/online/common_logic/langchain_integration/chains/query_rewrite_chain.py diff --git a/source/lambda/online/langchain_integration/chains/rag_chain.py b/source/lambda/online/common_logic/langchain_integration/chains/rag_chain.py similarity index 99% rename from source/lambda/online/langchain_integration/chains/rag_chain.py rename to source/lambda/online/common_logic/langchain_integration/chains/rag_chain.py index be9d42efa..60c6b33b4 100644 --- a/source/lambda/online/langchain_integration/chains/rag_chain.py +++ b/source/lambda/online/common_logic/langchain_integration/chains/rag_chain.py @@ -101,7 +101,6 @@ class Mixtral8x7bChatChain(Claude2RagLLMChain): model_id = LLMModelType.MIXTRAL_8X7B_INSTRUCT - from .chat_chain import GLM4Chat9BChatChain class GLM4Chat9BRagChain(GLM4Chat9BChatChain): diff --git a/source/lambda/online/langchain_integration/chains/retail_chains/__init__.py b/source/lambda/online/common_logic/langchain_integration/chains/retail_chains/__init__.py similarity index 100% rename from 
source/lambda/online/langchain_integration/chains/retail_chains/__init__.py rename to source/lambda/online/common_logic/langchain_integration/chains/retail_chains/__init__.py diff --git a/source/lambda/online/langchain_integration/chains/retail_chains/auto_evaluation_chain.py b/source/lambda/online/common_logic/langchain_integration/chains/retail_chains/auto_evaluation_chain.py similarity index 100% rename from source/lambda/online/langchain_integration/chains/retail_chains/auto_evaluation_chain.py rename to source/lambda/online/common_logic/langchain_integration/chains/retail_chains/auto_evaluation_chain.py diff --git a/source/lambda/online/langchain_integration/chains/retail_chains/retail_conversation_summary_chain.py b/source/lambda/online/common_logic/langchain_integration/chains/retail_chains/retail_conversation_summary_chain.py similarity index 100% rename from source/lambda/online/langchain_integration/chains/retail_chains/retail_conversation_summary_chain.py rename to source/lambda/online/common_logic/langchain_integration/chains/retail_chains/retail_conversation_summary_chain.py diff --git a/source/lambda/online/langchain_integration/chains/retail_chains/retail_tool_calling_chain_claude_xml.py b/source/lambda/online/common_logic/langchain_integration/chains/retail_chains/retail_tool_calling_chain_claude_xml.py similarity index 100% rename from source/lambda/online/langchain_integration/chains/retail_chains/retail_tool_calling_chain_claude_xml.py rename to source/lambda/online/common_logic/langchain_integration/chains/retail_chains/retail_tool_calling_chain_claude_xml.py diff --git a/source/lambda/online/langchain_integration/chains/retail_chains/retail_tool_calling_chain_json.py b/source/lambda/online/common_logic/langchain_integration/chains/retail_chains/retail_tool_calling_chain_json.py similarity index 100% rename from source/lambda/online/langchain_integration/chains/retail_chains/retail_tool_calling_chain_json.py rename to 
source/lambda/online/common_logic/langchain_integration/chains/retail_chains/retail_tool_calling_chain_json.py diff --git a/source/lambda/online/langchain_integration/chains/stepback_chain.py b/source/lambda/online/common_logic/langchain_integration/chains/stepback_chain.py similarity index 100% rename from source/lambda/online/langchain_integration/chains/stepback_chain.py rename to source/lambda/online/common_logic/langchain_integration/chains/stepback_chain.py diff --git a/source/lambda/online/langchain_integration/chains/tool_calling_chain_api.py b/source/lambda/online/common_logic/langchain_integration/chains/tool_calling_chain_api.py similarity index 93% rename from source/lambda/online/langchain_integration/chains/tool_calling_chain_api.py rename to source/lambda/online/common_logic/langchain_integration/chains/tool_calling_chain_api.py index be918e369..a5674de5b 100644 --- a/source/lambda/online/langchain_integration/chains/tool_calling_chain_api.py +++ b/source/lambda/online/common_logic/langchain_integration/chains/tool_calling_chain_api.py @@ -125,8 +125,18 @@ def create_chain(cls, model_kwargs=None, **kwargs): model_id=cls.model_id, model_kwargs=model_kwargs, ) + llm = cls.bind_tools(llm,tools,fewshot_examples,fewshot_template=tool_fewshot_prompt) - chain = RunnablePassthrough.assign(chat_history=lambda x: cls.create_chat_history(x)) | llm + + tool_calling_template = ChatPromptTemplate.from_messages( + [ + SystemMessage(content=agent_system_prompt), + ("placeholder", "{chat_history}"), + ("human", "{query}") + ] + ) + # chain = RunnablePassthrough.assign(chat_history=lambda x: cls.create_chat_history(x)) | llm + chain = tool_calling_template | llm return chain diff --git a/source/lambda/online/langchain_integration/chains/tool_calling_chain_claude_xml.py b/source/lambda/online/common_logic/langchain_integration/chains/tool_calling_chain_claude_xml.py similarity index 100% rename from 
source/lambda/online/langchain_integration/chains/tool_calling_chain_claude_xml.py rename to source/lambda/online/common_logic/langchain_integration/chains/tool_calling_chain_claude_xml.py diff --git a/source/lambda/online/langchain_integration/chains/translate_chain.py b/source/lambda/online/common_logic/langchain_integration/chains/translate_chain.py similarity index 100% rename from source/lambda/online/langchain_integration/chains/translate_chain.py rename to source/lambda/online/common_logic/langchain_integration/chains/translate_chain.py diff --git a/source/lambda/online/langchain_integration/chat_models/__init__.py b/source/lambda/online/common_logic/langchain_integration/chat_models/__init__.py similarity index 100% rename from source/lambda/online/langchain_integration/chat_models/__init__.py rename to source/lambda/online/common_logic/langchain_integration/chat_models/__init__.py diff --git a/source/lambda/online/langchain_integration/chat_models/bedrock_models.py b/source/lambda/online/common_logic/langchain_integration/chat_models/bedrock_models.py similarity index 99% rename from source/lambda/online/langchain_integration/chat_models/bedrock_models.py rename to source/lambda/online/common_logic/langchain_integration/chat_models/bedrock_models.py index 162ab998a..835313dd9 100644 --- a/source/lambda/online/langchain_integration/chat_models/bedrock_models.py +++ b/source/lambda/online/common_logic/langchain_integration/chat_models/bedrock_models.py @@ -66,6 +66,7 @@ class MistralLarge2407(Claude2): class Llama3d1Instruct70B(Claude2): model_id = LLMModelType.LLAMA3_1_70B_INSTRUCT + class CohereCommandRPlus(Claude2): model_id = LLMModelType.COHERE_COMMAND_R_PLUS diff --git a/source/lambda/online/langchain_integration/chat_models/openai_models.py b/source/lambda/online/common_logic/langchain_integration/chat_models/openai_models.py similarity index 100% rename from source/lambda/online/langchain_integration/chat_models/openai_models.py rename to 
source/lambda/online/common_logic/langchain_integration/chat_models/openai_models.py diff --git a/source/lambda/online/common_logic/langchain_integration/langgraph_integration.py b/source/lambda/online/common_logic/langchain_integration/langgraph_integration.py new file mode 100644 index 000000000..61b264e0a --- /dev/null +++ b/source/lambda/online/common_logic/langchain_integration/langgraph_integration.py @@ -0,0 +1,12 @@ + +# set global langgraph app + +current_app = None + +def set_currrent_app(app): + global current_app + current_app = app + +def get_current_app(): + assert current_app is not None + return current_app \ No newline at end of file diff --git a/source/lambda/online/langchain_integration/tools/__init__.py b/source/lambda/online/common_logic/langchain_integration/tools/__init__.py similarity index 90% rename from source/lambda/online/langchain_integration/tools/__init__.py rename to source/lambda/online/common_logic/langchain_integration/tools/__init__.py index 0e81f84b1..5d1afe164 100644 --- a/source/lambda/online/langchain_integration/tools/__init__.py +++ b/source/lambda/online/common_logic/langchain_integration/tools/__init__.py @@ -85,9 +85,12 @@ def convert_tool_def_to_pydantic(tool_id,tool_def:Union[dict,BaseModel]): use_schema_description=True ) result = parser.parse() + result = result.replace("from __future__ import annotations","") new_tool_module = types.ModuleType(tool_id) exec(result, new_tool_module.__dict__) - return new_tool_module.Model + model_cls = new_tool_module.Model + # model_cls.model_rebuild() + return model_cls @staticmethod @@ -108,7 +111,7 @@ def register_lc_tool( tool_identifier = cls.get_tool_identifier( scene=scene, name=name, - tool_identifier=None + tool_identifier=tool_identifier ) assert isinstance(tool,BaseTool),(tool,type(tool)) cls.tool_map[tool_identifier.tool_id] = tool @@ -246,14 +249,13 @@ def wrapper(*args, **kwargs): if "tool_identifier" in inspect.signature(func).parameters: kwargs = 
{**kwargs,"tool_identifier":tool_identifier} return func(*args, **kwargs) - TOOL_MOFULE_LOAD_FN_MAP[tool_identifier.tool_id] = func + TOOL_MOFULE_LOAD_FN_MAP[tool_identifier.tool_id] = wrapper return wrapper return decorator ############################# tool load func ###################### - @lazy_tool_load_decorator(SceneType.COMMON,"get_weather") def _load_common_weather_tool(tool_identifier:ToolIdentifier): from .common_tools import get_weather @@ -268,10 +270,10 @@ def _load_common_weather_tool(tool_identifier:ToolIdentifier): "required": ["city_name"] } ToolManager.register_func_as_tool( - tool_identifier.scene, - tool_identifier.name, - get_weather.get_weather, - tool_def, + func=get_weather.get_weather, + tool_def=tool_def, + scene=tool_identifier.scene, + name=tool_identifier.name, return_direct=False ) @@ -286,13 +288,14 @@ def _load_common_rhetorical_tool(tool_identifier:ToolIdentifier): "description": "The rhetorical question to user", "type": "string" }, - } + }, + "required": [] #["question"] } ToolManager.register_func_as_tool( - tool_identifier.scene, - tool_identifier.name, - give_rhetorical_question.give_rhetorical_question, - tool_def, + scene=tool_identifier.scene, + name=tool_identifier.name, + func=give_rhetorical_question.give_rhetorical_question, + tool_def=tool_def, return_direct=True ) @@ -312,10 +315,10 @@ def _load_common_final_response_tool(tool_identifier:ToolIdentifier): "required": ["response"] } ToolManager.register_func_as_tool( - tool_identifier.scene, - tool_identifier.name, - give_final_response.give_final_response, - tool_def, + scene=tool_identifier.scene, + name=tool_identifier.name, + func=give_final_response.give_final_response, + tool_def=tool_def, return_direct=True ) @@ -335,10 +338,10 @@ def _load_common_chat_tool(tool_identifier:ToolIdentifier): } ToolManager.register_func_as_tool( - tool_identifier.scene, - tool_identifier.name, - chat.chat, - tool_def, + scene=tool_identifier.scene, + name=tool_identifier.name, + 
func=chat.chat, + tool_def=tool_def, return_direct=True ) @@ -354,13 +357,13 @@ def _load_common_rag_tool(tool_identifier:ToolIdentifier): "type": "string" } }, - "required": ["query"] + # "required": ["query"] } ToolManager.register_func_as_tool( - tool_identifier.scene, - tool_identifier.name, - rag.rag_tool, - tool_def, + scene=tool_identifier.scene, + name=tool_identifier.name, + func=rag.rag_tool, + tool_def=tool_def, return_direct=True ) diff --git a/source/lambda/online/common_logic/langchain_integration/tools/common_tools/__init__retire.py b/source/lambda/online/common_logic/langchain_integration/tools/common_tools/__init__retire.py new file mode 100644 index 000000000..c57069898 --- /dev/null +++ b/source/lambda/online/common_logic/langchain_integration/tools/common_tools/__init__retire.py @@ -0,0 +1,121 @@ +from common_logic.common_utils.constant import SceneType, ToolRuningMode +from .._tool_base import tool_manager +from . import ( + get_weather, + give_rhetorical_question, + give_final_response, + chat, + rag +) + + +SCENE = SceneType.COMMON +LAMBDA_NAME = "lambda_common_tools" + +tool_manager.register_tool({ + "name": "get_weather", + "scene": SCENE, + "lambda_name": LAMBDA_NAME, + "lambda_module_path": get_weather.lambda_handler, + "tool_def": { + "name": "get_weather", + "description": "Get the current weather for `city_name`", + "parameters": { + "type": "object", + "properties": { + "city_name": { + "description": "The name of the city to be queried", + "type": "string" + }, + }, + "required": ["city_name"] + } + }, + "running_mode": ToolRuningMode.LOOP +}) + + +tool_manager.register_tool( + { + "name": "give_rhetorical_question", + "scene": SCENE, + "lambda_name": LAMBDA_NAME, + "lambda_module_path": give_rhetorical_question.lambda_handler, + "tool_def": { + "name": "give_rhetorical_question", + "description": "If the user's question is not clear and specific, resulting in the inability to call other tools, please call this tool to ask the user a 
rhetorical question", + "parameters": { + "type": "object", + "properties": { + "question": { + "description": "The rhetorical question to user", + "type": "string" + }, + }, + "required": ["question"], + }, + }, + "running_mode": ToolRuningMode.ONCE + } +) + + +tool_manager.register_tool( + { + "name": "give_final_response", + "scene": SCENE, + "lambda_name": LAMBDA_NAME, + "lambda_module_path": give_final_response.lambda_handler, + "tool_def": { + "name": "give_final_response", + "description": "If none of the other tools need to be called, call the current tool to complete the direct response to the user.", + "parameters": { + "type": "object", + "properties": { + "response": { + "description": "Response to user", + "type": "string" + } + }, + "required": ["response"] + }, + }, + "running_mode": ToolRuningMode.ONCE + } +) + + +tool_manager.register_tool({ + "name": "chat", + "scene": SCENE, + "lambda_name": LAMBDA_NAME, + "lambda_module_path": chat.lambda_handler, + "tool_def": { + "name": "chat", + "description": "casual talk with AI", + "parameters": { + "type": "object", + "properties": { + "response": { + "description": "response to users", + "type": "string" + }}, + "required": ["response"] + }, + }, + "running_mode": ToolRuningMode.ONCE +}) + + +tool_manager.register_tool({ + "name": "rag_tool", + "scene": SCENE, + "lambda_name": LAMBDA_NAME, + "lambda_module_path": rag.lambda_handler, + "tool_def": { + "name": "rag_tool", + "description": "private knowledge", + "parameters": {} + }, + "running_mode": ToolRuningMode.ONCE +}) diff --git a/source/lambda/online/langchain_integration/tools/common_tools/chat.py b/source/lambda/online/common_logic/langchain_integration/tools/common_tools/chat.py similarity index 100% rename from source/lambda/online/langchain_integration/tools/common_tools/chat.py rename to source/lambda/online/common_logic/langchain_integration/tools/common_tools/chat.py diff --git 
a/source/lambda/online/langchain_integration/tools/common_tools/comparison_rag.py b/source/lambda/online/common_logic/langchain_integration/tools/common_tools/comparison_rag.py similarity index 100% rename from source/lambda/online/langchain_integration/tools/common_tools/comparison_rag.py rename to source/lambda/online/common_logic/langchain_integration/tools/common_tools/comparison_rag.py diff --git a/source/lambda/online/langchain_integration/tools/common_tools/get_weather.py b/source/lambda/online/common_logic/langchain_integration/tools/common_tools/get_weather.py similarity index 100% rename from source/lambda/online/langchain_integration/tools/common_tools/get_weather.py rename to source/lambda/online/common_logic/langchain_integration/tools/common_tools/get_weather.py diff --git a/source/lambda/online/langchain_integration/tools/common_tools/give_final_response.py b/source/lambda/online/common_logic/langchain_integration/tools/common_tools/give_final_response.py similarity index 100% rename from source/lambda/online/langchain_integration/tools/common_tools/give_final_response.py rename to source/lambda/online/common_logic/langchain_integration/tools/common_tools/give_final_response.py diff --git a/source/lambda/online/langchain_integration/tools/common_tools/give_rhetorical_question.py b/source/lambda/online/common_logic/langchain_integration/tools/common_tools/give_rhetorical_question.py similarity index 100% rename from source/lambda/online/langchain_integration/tools/common_tools/give_rhetorical_question.py rename to source/lambda/online/common_logic/langchain_integration/tools/common_tools/give_rhetorical_question.py diff --git a/source/lambda/online/langchain_integration/tools/common_tools/rag.py b/source/lambda/online/common_logic/langchain_integration/tools/common_tools/rag.py similarity index 96% rename from source/lambda/online/langchain_integration/tools/common_tools/rag.py rename to 
source/lambda/online/common_logic/langchain_integration/tools/common_tools/rag.py index 8c0b6d736..e6a878fb8 100644 --- a/source/lambda/online/langchain_integration/tools/common_tools/rag.py +++ b/source/lambda/online/common_logic/langchain_integration/tools/common_tools/rag.py @@ -4,7 +4,7 @@ LLMTaskType ) from common_logic.common_utils.lambda_invoke_utils import send_trace -from langchain_integration.langgraph_integration import get_current_app +from common_logic.langchain_integration.langgraph_integration import get_current_app def rag_tool(retriever_config:dict,state): diff --git a/source/lambda/online/langchain_integration/tools/common_tools/step_back_rag.py b/source/lambda/online/common_logic/langchain_integration/tools/common_tools/step_back_rag.py similarity index 100% rename from source/lambda/online/langchain_integration/tools/common_tools/step_back_rag.py rename to source/lambda/online/common_logic/langchain_integration/tools/common_tools/step_back_rag.py From 4401f386d0985ec77ed6b7275d9943a2f29e7a5a Mon Sep 17 00:00:00 2001 From: zhouxss Date: Sat, 2 Nov 2024 08:12:41 +0000 Subject: [PATCH 050/110] modify agent prompt; add python_repl tool; adapt to pydantic v2 --- source/lambda/job/dep/llm_bot_dep/sm_utils.py | 15 +- .../common_utils/lambda_invoke_utils.py | 49 +- .../common_logic/common_utils/logger_utils.py | 13 +- .../common_logic/common_utils/prompt_utils.py | 27 +- .../common_utils/pydantic_models.py | 1 + .../chains/tool_calling_chain_api.py | 13 +- .../chat_models/__init__.py | 3 +- .../chat_models/bedrock_models.py | 16 +- .../langchain_integration/tools/__init__.py | 161 +--- .../tools/common_tools/__init__.py | 135 ++++ .../tools/common_tools/__init__retire.py | 121 --- .../tools/common_tools/rag.py | 10 +- source/lambda/online/functions/__init__.py | 14 +- .../functions_utils/retriever/retriever.py | 2 +- .../retriever/utils/websearch_retrievers.py | 2 +- .../main_utils/online_entries/__init__.py | 6 +- 
.../main_utils/online_entries/common_entry.py | 414 ++++++++--- .../online_entries/common_entry_v2.py | 700 ------------------ .../lambda_main/test/local_test_base.py | 2 +- .../test/main_local_test_common.py | 38 +- 20 files changed, 603 insertions(+), 1139 deletions(-) create mode 100644 source/lambda/online/common_logic/langchain_integration/tools/common_tools/__init__.py delete mode 100644 source/lambda/online/common_logic/langchain_integration/tools/common_tools/__init__retire.py delete mode 100644 source/lambda/online/lambda_main/main_utils/online_entries/common_entry_v2.py diff --git a/source/lambda/job/dep/llm_bot_dep/sm_utils.py b/source/lambda/job/dep/llm_bot_dep/sm_utils.py index f9a063268..7520ec9c5 100644 --- a/source/lambda/job/dep/llm_bot_dep/sm_utils.py +++ b/source/lambda/job/dep/llm_bot_dep/sm_utils.py @@ -1,11 +1,12 @@ import json import io from typing import Any, Dict, Iterator, List, Mapping, Optional -from langchain.llms.sagemaker_endpoint import LLMContentHandler, SagemakerEndpoint -from langchain.embeddings import SagemakerEndpointEmbeddings, BedrockEmbeddings -from langchain.embeddings.sagemaker_endpoint import EmbeddingsContentHandler +from langchain_community.llms import SagemakerEndpoint +from langchain_community.llms.sagemaker_endpoint import LLMContentHandler +from langchain_community.embeddings import SagemakerEndpointEmbeddings,BedrockEmbeddings +from langchain_community.embeddings.sagemaker_endpoint import EmbeddingsContentHandler from langchain.callbacks.manager import CallbackManagerForLLMRun -from langchain.llms.utils import enforce_stop_tokens +from langchain_community.llms.utils import enforce_stop_tokens from typing import Dict, List, Optional, Any,Iterator from langchain_core.outputs import GenerationChunk import boto3 @@ -234,12 +235,12 @@ def transform_output(self, output: bytes) -> str: function. See `boto3`_. docs for more info. .. 
_boto3: """ - content_type = "application/json" - accepts = "application/json" + content_type: str = "application/json" + accepts: str = "application/json" class Config: """Configuration for this pydantic object.""" - extra = Extra.forbid + extra = Extra.forbid.value @root_validator() def validate_environment(cls, values: Dict) -> Dict: diff --git a/source/lambda/online/common_logic/common_utils/lambda_invoke_utils.py b/source/lambda/online/common_logic/common_utils/lambda_invoke_utils.py index f87ef838e..e5197c892 100644 --- a/source/lambda/online/common_logic/common_utils/lambda_invoke_utils.py +++ b/source/lambda/online/common_logic/common_utils/lambda_invoke_utils.py @@ -3,18 +3,21 @@ import importlib import json import time +import os from typing import Any, Dict, Optional, Callable, Union +import threading import requests from common_logic.common_utils.constant import StreamMessageType from common_logic.common_utils.logger_utils import get_logger from common_logic.common_utils.websocket_utils import is_websocket_request, send_to_ws_client -from langchain.pydantic_v1 import BaseModel, Field, root_validator +from pydantic import BaseModel, Field, model_validator + from .exceptions import LambdaInvokeError logger = get_logger("lambda_invoke_utils") - +thread_local = threading.local() __FUNC_NAME_MAP = { "query_preprocess": "Preprocess for multi-round conversation", @@ -25,6 +28,33 @@ "tool_execution": "Final tool result" } + +class StateContext: + + def __init__(self,state): + self.state=state + + @classmethod + def get_current_state(cls): + state = getattr(thread_local,'state',None) + assert state is not None,"There is not a valid state in current context" + return state + + @classmethod + def set_current_state(cls, state): + setattr(thread_local, 'state', state) + + @classmethod + def clear_state(cls): + setattr(thread_local, 'state', None) + + def __enter__(self): + self.set_current_state(self.state) + + def __exit__(self, exc_type, exc_val, exc_tb): + 
self.clear_state() + + class LAMBDA_INVOKE_MODE(enum.Enum): LAMBDA = "lambda" LOCAL = "local" @@ -54,26 +84,24 @@ class LambdaInvoker(BaseModel): region_name: str = None credentials_profile_name: Optional[str] = Field(default=None, exclude=True) - @root_validator() + @model_validator(mode="before") def validate_environment(cls, values: Dict): if values.get("client") is not None: return values try: import boto3 - try: - if values["credentials_profile_name"] is not None: + if values.get("credentials_profile_name") is not None: session = boto3.Session( profile_name=values["credentials_profile_name"] ) else: # use default credentials session = boto3.Session() - values["client"] = session.client( - "lambda", region_name=values["region_name"] + "lambda", + region_name=values.get("region_name",os.environ['AWS_REGION']) ) - except Exception as e: raise ValueError( "Could not load credentials to authenticate with AWS client. " @@ -284,7 +312,10 @@ def wrapper(state: Dict[str, Any]) -> Dict[str, Any]: current_stream_use, ws_connection_id, enable_trace) state['trace_infos'].append( f"Enter: {func.__name__}, time: {time.time()}") - output = func(state) + + with StateContext(state): + output = func(state) + current_monitor_infos = output.get(monitor_key, None) if current_monitor_infos is not None: send_trace(f"\n\n {current_monitor_infos}", diff --git a/source/lambda/online/common_logic/common_utils/logger_utils.py b/source/lambda/online/common_logic/common_utils/logger_utils.py index 22ba70327..5216c8ef2 100644 --- a/source/lambda/online/common_logic/common_utils/logger_utils.py +++ b/source/lambda/online/common_logic/common_utils/logger_utils.py @@ -1,4 +1,3 @@ - import logging import threading import os @@ -72,3 +71,15 @@ def print_llm_messages(msg, logger=logger): "ENABLE_PRINT_MESSAGES", 'True').lower() in ('true', '1', 't') if enable_print_messages: logger.info(msg) + + +def llm_messages_print_decorator(fn): + @wraps(fn) + def _inner(*args, **kwargs): + if args: + 
print_llm_messages(args) + if kwargs: + print_llm_messages(kwargs) + return fn(*args, **kwargs) + return _inner + diff --git a/source/lambda/online/common_logic/common_utils/prompt_utils.py b/source/lambda/online/common_logic/common_utils/prompt_utils.py index 644411a6b..0ff72f404 100644 --- a/source/lambda/online/common_logic/common_utils/prompt_utils.py +++ b/source/lambda/online/common_logic/common_utils/prompt_utils.py @@ -365,13 +365,13 @@ def prompt_template_render(self, prompt_template: dict): You are a helpful AI assistant. Today is {date},{weekday}. Here are some guidelines for you: -- Always start each answer with a reflection and write the reflection process in the tag. Please follow the steps below to think about it: +- Here are some tips for tool use: 1. Determine whether the current context is sufficient to answer the user's question. 2. If the current context is sufficient to answer the user's question, call the `give_final_response` tool. 3. If the current context is not sufficient to answer the user's question, you can consider calling the provided tools. 4. If the parameters of the tool you call do not meet the requirements, call the `give_rhetorical_question` tool to ask the user for more information. If the tool does not require parameters, do not call the `give_rhetorical_question` tool. 5. Finally, output the name of the tool you want to call. -- Always output with the same language as the content within . If the content is english, use english to output. If the content is chinese, use chinese to output. +- Always output with the same language as the content from user. If the content is english, use english to output. If the content is chinese, use chinese to output. """ register_prompt_templates( @@ -388,6 +388,29 @@ def prompt_template_render(self, prompt_template: dict): prompt_name="agent_system_prompt" ) + +# AGENT_SYSTEM_PROMPT_LLAMA = """\ +# You are a helpful AI assistant. Today is {date},{weekday}. 
+# Here are some guidelines for you: +# +# - Always referece each answer with a reflection and write the reflection process in the tag. Please follow the steps below to think about it: +# 1. Determine whether the current context is sufficient to answer the user's question. +# 2. If the current context is sufficient to answer the user's question, call the `give_final_response` tool. +# 3. If the current context is not sufficient to answer the user's question, you can consider calling the provided tools. +# 4. If the parameters of the tool you call do not meet the requirements, call the `give_rhetorical_question` tool to ask the user for more information. If the tool does not require parameters, do not call the `give_rhetorical_question` tool. +# 5. Finally, output the name of the tool you want to call. +# - Always output with the same language as the content within . If the content is english, use english to output. If the content is chinese, use chinese to output. +# """ + +# register_prompt_templates( +# model_ids=[ +# LLMModelType.LLAMA3_1_70B_INSTRUCT, +# ], +# task_type=LLMTaskType.TOOL_CALLING_API, +# prompt_template=AGENT_SYSTEM_PROMPT, +# prompt_name="agent_system_prompt" +# ) + # AGENT_GUIDELINES_PROMPT = """ # - 每次回答总是先进行思考,并将思考过程写在标签中。请你按照下面的步骤进行思考:。 # 2. 
如果当前的上下文足够回答用户的问题,请调用 `give_final_response` 工具。 diff --git a/source/lambda/online/common_logic/common_utils/pydantic_models.py b/source/lambda/online/common_logic/common_utils/pydantic_models.py index 2ee51e36d..1296b372d 100644 --- a/source/lambda/online/common_logic/common_utils/pydantic_models.py +++ b/source/lambda/online/common_logic/common_utils/pydantic_models.py @@ -28,6 +28,7 @@ class ForbidBaseModel(BaseModel): class AllowBaseModel(BaseModel): class Config: extra = "allow" + use_enum_values = True class LLMConfig(AllowBaseModel): diff --git a/source/lambda/online/common_logic/langchain_integration/chains/tool_calling_chain_api.py b/source/lambda/online/common_logic/langchain_integration/chains/tool_calling_chain_api.py index a5674de5b..35c7c0fa4 100644 --- a/source/lambda/online/common_logic/langchain_integration/chains/tool_calling_chain_api.py +++ b/source/lambda/online/common_logic/langchain_integration/chains/tool_calling_chain_api.py @@ -60,7 +60,9 @@ def get_common_system_prompt(cls,system_prompt_template:str): def bind_tools(cls,llm:BaseChatModel,tools:List[BaseTool], fewshot_examples=None, fewshot_template=None,tool_choice='any'): tools = [tool.model_copy() for tool in tools] if not fewshot_examples: - return llm.bind_tools(tools,tool_choice=tool_choice) + if getattr(llm,"enable_auto_tool_choice",True): + return llm.bind_tools(tools,tool_choice=tool_choice) + return llm.bind_tools(tools) # add fewshot examples to tool description tools_map = {tool.name:tool for tool in tools} @@ -84,7 +86,10 @@ def bind_tools(cls,llm:BaseChatModel,tools:List[BaseTool], fewshot_examples=None ) tool.description += "\n\n".join(examples_strs) - return llm.bind_tools(tools,tool_choice=tool_choice) + + if getattr(llm,"enable_auto_tool_choice",True): + return llm.bind_tools(tools,tool_choice=tool_choice) + return llm.bind_tools(tools) @classmethod @@ -132,7 +137,9 @@ def create_chain(cls, model_kwargs=None, **kwargs): [ SystemMessage(content=agent_system_prompt), 
("placeholder", "{chat_history}"), - ("human", "{query}") + ("human", "{query}"), + ("placeholder", "{agent_tool_history}"), + ] ) # chain = RunnablePassthrough.assign(chat_history=lambda x: cls.create_chat_history(x)) | llm diff --git a/source/lambda/online/common_logic/langchain_integration/chat_models/__init__.py b/source/lambda/online/common_logic/langchain_integration/chat_models/__init__.py index 8b9092ec4..56ee8dd53 100644 --- a/source/lambda/online/common_logic/langchain_integration/chat_models/__init__.py +++ b/source/lambda/online/common_logic/langchain_integration/chat_models/__init__.py @@ -36,7 +36,8 @@ def __new__(cls, name, bases, attrs): class Model(ModeMixins,metaclass=ModelMeta): - model_id = None + model_id: str = None + enable_auto_tool_choice: bool = True model_map = {} @classmethod diff --git a/source/lambda/online/common_logic/langchain_integration/chat_models/bedrock_models.py b/source/lambda/online/common_logic/langchain_integration/chat_models/bedrock_models.py index 835313dd9..4c82373f5 100644 --- a/source/lambda/online/common_logic/langchain_integration/chat_models/bedrock_models.py +++ b/source/lambda/online/common_logic/langchain_integration/chat_models/bedrock_models.py @@ -1,19 +1,24 @@ import os -from langchain_aws.chat_models import ChatBedrockConverse +from langchain_aws.chat_models import ChatBedrockConverse as _ChatBedrockConverse from common_logic.common_utils.constant import ( MessageType, LLMModelType ) -from common_logic.common_utils.logger_utils import get_logger +from common_logic.common_utils.logger_utils import get_logger,llm_messages_print_decorator from . 
import Model - logger = get_logger("bedrock_model") + +class ChatBedrockConverse(_ChatBedrockConverse): + enable_auto_tool_choice: bool = True + + # Bedrock model type class Claude2(Model): model_id = LLMModelType.CLAUDE_2 default_model_kwargs = {"max_tokens": 2000, "temperature": 0.7, "top_p": 0.9} + enable_auto_tool_choice = True @classmethod def create_model(cls, model_kwargs=None, **kwargs): @@ -34,8 +39,11 @@ def create_model(cls, model_kwargs=None, **kwargs): credentials_profile_name=credentials_profile_name, region_name=region_name, model=cls.model_id, + enable_auto_tool_choice=cls.enable_auto_tool_choice, **model_kwargs, ) + llm.client.converse_stream = llm_messages_print_decorator(llm.client.converse_stream) + llm.client.converse = llm_messages_print_decorator(llm.client.converse) return llm @@ -65,10 +73,12 @@ class MistralLarge2407(Claude2): class Llama3d1Instruct70B(Claude2): model_id = LLMModelType.LLAMA3_1_70B_INSTRUCT + enable_auto_tool_choice = False class CohereCommandRPlus(Claude2): model_id = LLMModelType.COHERE_COMMAND_R_PLUS + enable_auto_tool_choice = False diff --git a/source/lambda/online/common_logic/langchain_integration/tools/__init__.py b/source/lambda/online/common_logic/langchain_integration/tools/__init__.py index 5d1afe164..f25fd76e2 100644 --- a/source/lambda/online/common_logic/langchain_integration/tools/__init__.py +++ b/source/lambda/online/common_logic/langchain_integration/tools/__init__.py @@ -1,33 +1,3 @@ -# from langchain.tools.base import StructuredTool,BaseTool,tool -# StructuredTool.from_function -# from langchain_experimental.tools import PythonREPLTool -# from langchain_core.utils.function_calling import convert_to_openai_function -# from llama_index.core.tools import FunctionTool -# from langchain.tools import BaseTool -# from pydantic import create_model - -# from langchain_community.tools import WikipediaQueryRun - - -# builder = StateGraph(State) - - -# # Define nodes: these do the work -# 
builder.add_node("assistant", Assistant(part_1_assistant_runnable)) -# builder.add_node("tools", create_tool_node_with_fallback(part_1_tools)) -# # Define edges: these determine how the control flow moves -# builder.add_edge(START, "assistant") -# builder.add_conditional_edges( -# "assistant", -# tools_condition, -# ) -# builder.add_edge("tools", "assistant") - -# # The checkpointer lets the graph persist its state -# # this is a complete memory for the entire graph. -# memory = MemorySaver() -# part_1_graph = builder.compile(checkpointer=memory) - from typing import Optional,Union from pydantic import BaseModel import platform @@ -40,7 +10,7 @@ from datamodel_code_generator.model import get_data_model_types from datamodel_code_generator.parser.jsonschema import JsonSchemaParser from langchain.tools.base import StructuredTool as _StructuredTool ,BaseTool -from langchain_core.pydantic_v1 import create_model,BaseModel +# from langchain_core.pydantic_v1 import BaseModel from common_logic.common_utils.constant import SceneType from common_logic.common_utils.lambda_invoke_utils import invoke_with_lambda from functools import partial @@ -48,10 +18,9 @@ class StructuredTool(_StructuredTool): - pass_state:bool = False # if pass state into tool invoke - pass_state_name:str = "state" # pass state name - - + pass + # pass_state:bool = False # if pass state into tool invoke + # pass_state_name:str = "state" # pass state name class ToolIdentifier(BaseModel): scene: SceneType @@ -89,7 +58,6 @@ def convert_tool_def_to_pydantic(tool_id,tool_def:Union[dict,BaseModel]): new_tool_module = types.ModuleType(tool_id) exec(result, new_tool_module.__dict__) model_cls = new_tool_module.Model - # model_cls.model_rebuild() return model_cls @@ -191,8 +159,8 @@ def register_common_rag_tool( name=None, tool_identifier=None, return_direct=False, - pass_state=True, - pass_state_name='state' + # pass_state=True, + # pass_state_name='state' ): assert scene == SceneType.COMMON, scene from 
.common_tools.rag import rag_tool @@ -218,8 +186,8 @@ class Config: ), description=description, return_direct=return_direct, - pass_state=pass_state, - pass_state_name=pass_state_name + # pass_state=pass_state, + # pass_state_name=pass_state_name ) return ToolManager.register_lc_tool( @@ -254,118 +222,7 @@ def wrapper(*args, **kwargs): return decorator -############################# tool load func ###################### - -@lazy_tool_load_decorator(SceneType.COMMON,"get_weather") -def _load_common_weather_tool(tool_identifier:ToolIdentifier): - from .common_tools import get_weather - tool_def = { - "description": "Get the current weather for `city_name`", - "properties": { - "city_name": { - "description": "The name of the city to be queried", - "type": "string" - }, - }, - "required": ["city_name"] - } - ToolManager.register_func_as_tool( - func=get_weather.get_weather, - tool_def=tool_def, - scene=tool_identifier.scene, - name=tool_identifier.name, - return_direct=False - ) - - -@lazy_tool_load_decorator(SceneType.COMMON,"give_rhetorical_question") -def _load_common_rhetorical_tool(tool_identifier:ToolIdentifier): - from .common_tools import give_rhetorical_question - tool_def = { - "description": "If the user's question is not clear and specific, resulting in the inability to call other tools, please call this tool to ask the user a rhetorical question", - "properties": { - "question": { - "description": "The rhetorical question to user", - "type": "string" - }, - }, - "required": [] #["question"] - } - ToolManager.register_func_as_tool( - scene=tool_identifier.scene, - name=tool_identifier.name, - func=give_rhetorical_question.give_rhetorical_question, - tool_def=tool_def, - return_direct=True - ) - - -@lazy_tool_load_decorator(SceneType.COMMON,"give_final_response") -def _load_common_final_response_tool(tool_identifier:ToolIdentifier): - from .common_tools import give_final_response - - tool_def = { - "description": "If none of the other tools need to be 
called, call the current tool to complete the direct response to the user.", - "properties": { - "response": { - "description": "Response to user", - "type": "string" - } - }, - "required": ["response"] - } - ToolManager.register_func_as_tool( - scene=tool_identifier.scene, - name=tool_identifier.name, - func=give_final_response.give_final_response, - tool_def=tool_def, - return_direct=True - ) - - -@lazy_tool_load_decorator(SceneType.COMMON,"chat") -def _load_common_chat_tool(tool_identifier:ToolIdentifier): - from .common_tools import chat - tool_def = { - "description": "casual talk with AI", - "properties": { - "response": { - "description": "response to users", - "type": "string" - } - }, - "required": ["response"] - } - - ToolManager.register_func_as_tool( - scene=tool_identifier.scene, - name=tool_identifier.name, - func=chat.chat, - tool_def=tool_def, - return_direct=True - ) - - -@lazy_tool_load_decorator(SceneType.COMMON,"rag_tool") -def _load_common_rag_tool(tool_identifier:ToolIdentifier): - from .common_tools import rag - tool_def = { - "description": "private knowledge", - "properties": { - "query": { - "description": "query for retrieve", - "type": "string" - } - }, - # "required": ["query"] - } - ToolManager.register_func_as_tool( - scene=tool_identifier.scene, - name=tool_identifier.name, - func=rag.rag_tool, - tool_def=tool_def, - return_direct=True - ) +from . import common_tools diff --git a/source/lambda/online/common_logic/langchain_integration/tools/common_tools/__init__.py b/source/lambda/online/common_logic/langchain_integration/tools/common_tools/__init__.py new file mode 100644 index 000000000..170daa44f --- /dev/null +++ b/source/lambda/online/common_logic/langchain_integration/tools/common_tools/__init__.py @@ -0,0 +1,135 @@ +from .. 
import lazy_tool_load_decorator,ToolIdentifier,ToolManager +from common_logic.common_utils.constant import SceneType + + +@lazy_tool_load_decorator(SceneType.COMMON,"get_weather") +def _load_weather_tool(tool_identifier:ToolIdentifier): + from . import get_weather + tool_def = { + "description": "Get the current weather for `city_name`", + "properties": { + "city_name": { + "description": "The name of the city to be queried", + "type": "string" + }, + }, + "required": ["city_name"] + } + ToolManager.register_func_as_tool( + func=get_weather.get_weather, + tool_def=tool_def, + scene=tool_identifier.scene, + name=tool_identifier.name, + return_direct=False + ) + + +@lazy_tool_load_decorator(SceneType.COMMON,"give_rhetorical_question") +def _load_rhetorical_tool(tool_identifier:ToolIdentifier): + from . import give_rhetorical_question + tool_def = { + "description": "If the user's question is not clear and specific, resulting in the inability to call other tools, please call this tool to ask the user a rhetorical question", + "properties": { + "question": { + "description": "The rhetorical question to user", + "type": "string" + }, + }, + "required": ["question"] + } + ToolManager.register_func_as_tool( + scene=tool_identifier.scene, + name=tool_identifier.name, + func=give_rhetorical_question.give_rhetorical_question, + tool_def=tool_def, + return_direct=True + ) + + +@lazy_tool_load_decorator(SceneType.COMMON,"give_final_response") +def _load_final_response_tool(tool_identifier:ToolIdentifier): + from . 
import give_final_response + + tool_def = { + "description": "If none of the other tools need to be called, call the current tool to complete the direct response to the user.", + "properties": { + "response": { + "description": "Response to user", + "type": "string" + } + }, + "required": ["response"] + } + ToolManager.register_func_as_tool( + scene=tool_identifier.scene, + name=tool_identifier.name, + func=give_final_response.give_final_response, + tool_def=tool_def, + return_direct=True + ) + + +@lazy_tool_load_decorator(SceneType.COMMON,"chat") +def _load_chat_tool(tool_identifier:ToolIdentifier): + from . import chat + tool_def = { + "description": "casual talk with AI", + "properties": { + "response": { + "description": "response to users", + "type": "string" + } + }, + "required": ["response"] + } + + ToolManager.register_func_as_tool( + scene=tool_identifier.scene, + name=tool_identifier.name, + func=chat.chat, + tool_def=tool_def, + return_direct=True + ) + + +@lazy_tool_load_decorator(SceneType.COMMON,"rag_tool") +def _load_rag_tool(tool_identifier:ToolIdentifier): + from . import rag + tool_def = { + "description": "private knowledge", + "properties": { + "query": { + "description": "query for retrieve", + "type": "string" + } + }, + # "required": ["query"] + } + ToolManager.register_func_as_tool( + scene=tool_identifier.scene, + name=tool_identifier.name, + func=rag.rag_tool, + tool_def=tool_def, + return_direct=True + ) + + + +################### langchain tools ####################### + +@lazy_tool_load_decorator(SceneType.COMMON,"python_repl") +def _loadd_python_repl_tool(tool_identifier:ToolIdentifier): + from langchain_core.tools import Tool + from langchain_experimental.utilities import PythonREPL + python_repl = PythonREPL() + repl_tool = Tool( + name="python_repl", + description="A Python shell. Use this to execute python commands. Input should be a valid python command. 
If you want to see the output of a value, you SHOULD print it out with `print(...)`.", + func=python_repl.run + ) + ToolManager.register_lc_tool( + scene=tool_identifier.scene, + name=tool_identifier.name, + tool=repl_tool + ) + diff --git a/source/lambda/online/common_logic/langchain_integration/tools/common_tools/__init__retire.py b/source/lambda/online/common_logic/langchain_integration/tools/common_tools/__init__retire.py deleted file mode 100644 index c57069898..000000000 --- a/source/lambda/online/common_logic/langchain_integration/tools/common_tools/__init__retire.py +++ /dev/null @@ -1,121 +0,0 @@ -from common_logic.common_utils.constant import SceneType, ToolRuningMode -from .._tool_base import tool_manager -from . import ( - get_weather, - give_rhetorical_question, - give_final_response, - chat, - rag -) - - -SCENE = SceneType.COMMON -LAMBDA_NAME = "lambda_common_tools" - -tool_manager.register_tool({ - "name": "get_weather", - "scene": SCENE, - "lambda_name": LAMBDA_NAME, - "lambda_module_path": get_weather.lambda_handler, - "tool_def": { - "name": "get_weather", - "description": "Get the current weather for `city_name`", - "parameters": { - "type": "object", - "properties": { - "city_name": { - "description": "The name of the city to be queried", - "type": "string" - }, - }, - "required": ["city_name"] - } - }, - "running_mode": ToolRuningMode.LOOP -}) - - -tool_manager.register_tool( - { - "name": "give_rhetorical_question", - "scene": SCENE, - "lambda_name": LAMBDA_NAME, - "lambda_module_path": give_rhetorical_question.lambda_handler, - "tool_def": { - "name": "give_rhetorical_question", - "description": "If the user's question is not clear and specific, resulting in the inability to call other tools, please call this tool to ask the user a rhetorical question", - "parameters": { - "type": "object", - "properties": { - "question": { - "description": "The rhetorical question to user", - "type": "string" - }, - }, - "required": ["question"], - }, - }, - 
"running_mode": ToolRuningMode.ONCE - } -) - - -tool_manager.register_tool( - { - "name": "give_final_response", - "scene": SCENE, - "lambda_name": LAMBDA_NAME, - "lambda_module_path": give_final_response.lambda_handler, - "tool_def": { - "name": "give_final_response", - "description": "If none of the other tools need to be called, call the current tool to complete the direct response to the user.", - "parameters": { - "type": "object", - "properties": { - "response": { - "description": "Response to user", - "type": "string" - } - }, - "required": ["response"] - }, - }, - "running_mode": ToolRuningMode.ONCE - } -) - - -tool_manager.register_tool({ - "name": "chat", - "scene": SCENE, - "lambda_name": LAMBDA_NAME, - "lambda_module_path": chat.lambda_handler, - "tool_def": { - "name": "chat", - "description": "casual talk with AI", - "parameters": { - "type": "object", - "properties": { - "response": { - "description": "response to users", - "type": "string" - }}, - "required": ["response"] - }, - }, - "running_mode": ToolRuningMode.ONCE -}) - - -tool_manager.register_tool({ - "name": "rag_tool", - "scene": SCENE, - "lambda_name": LAMBDA_NAME, - "lambda_module_path": rag.lambda_handler, - "tool_def": { - "name": "rag_tool", - "description": "private knowledge", - "parameters": {} - }, - "running_mode": ToolRuningMode.ONCE -}) diff --git a/source/lambda/online/common_logic/langchain_integration/tools/common_tools/rag.py b/source/lambda/online/common_logic/langchain_integration/tools/common_tools/rag.py index e6a878fb8..8d6ce7d3a 100644 --- a/source/lambda/online/common_logic/langchain_integration/tools/common_tools/rag.py +++ b/source/lambda/online/common_logic/langchain_integration/tools/common_tools/rag.py @@ -1,13 +1,13 @@ -from common_logic.common_utils.lambda_invoke_utils import invoke_lambda +from common_logic.common_utils.lambda_invoke_utils import invoke_lambda,StateContext from common_logic.common_utils.prompt_utils import get_prompt_templates_from_ddb from 
common_logic.common_utils.constant import ( LLMTaskType ) from common_logic.common_utils.lambda_invoke_utils import send_trace -from common_logic.langchain_integration.langgraph_integration import get_current_app -def rag_tool(retriever_config:dict,state): - + +def rag_tool(retriever_config:dict,query=None): + state = StateContext.get_current_state() # state = event_body['state'] context_list = [] # add qq match results @@ -15,7 +15,7 @@ def rag_tool(retriever_config:dict,state): figure_list = [] retriever_params = retriever_config # retriever_params = state["chatbot_config"]["private_knowledge_config"] - retriever_params["query"] = state[retriever_config.get("query_key","query")] + retriever_params["query"] = query or state[retriever_config.get("query_key","query")] # retriever_params["query"] = query output: str = invoke_lambda( event_body=retriever_params, diff --git a/source/lambda/online/functions/__init__.py b/source/lambda/online/functions/__init__.py index 2497bbe94..12aa317a6 100644 --- a/source/lambda/online/functions/__init__.py +++ b/source/lambda/online/functions/__init__.py @@ -1,11 +1,11 @@ # tool -from ._tool_base import get_tool_by_name,Tool,tool_manager +# from ._tool_base import get_tool_by_name,Tool,tool_manager -def init_common_tools(): - from . import lambda_common_tools +# def init_common_tools(): +# from . import lambda_common_tools -def init_aws_qa_tools(): - from . import lambda_aws_qa_tools +# def init_aws_qa_tools(): +# from . import lambda_aws_qa_tools -def init_retail_tools(): - from . import lambda_retail_tools \ No newline at end of file +# def init_retail_tools(): +# from . 
import lambda_retail_tools \ No newline at end of file diff --git a/source/lambda/online/functions/functions_utils/retriever/retriever.py b/source/lambda/online/functions/functions_utils/retriever/retriever.py index 694f8fbdd..977855e2f 100644 --- a/source/lambda/online/functions/functions_utils/retriever/retriever.py +++ b/source/lambda/online/functions/functions_utils/retriever/retriever.py @@ -23,9 +23,9 @@ GoogleRetriever, ) from langchain.retrievers import ( - AmazonKnowledgeBasesRetriever, ContextualCompressionRetriever, ) +from langchain_community.retrievers import AmazonKnowledgeBasesRetriever from langchain.retrievers.merger_retriever import MergerRetriever from langchain.schema.runnable import RunnableLambda, RunnablePassthrough from langchain_community.retrievers import AmazonKnowledgeBasesRetriever diff --git a/source/lambda/online/functions/functions_utils/retriever/utils/websearch_retrievers.py b/source/lambda/online/functions/functions_utils/retriever/utils/websearch_retrievers.py index f9b67d609..babdeb9b3 100644 --- a/source/lambda/online/functions/functions_utils/retriever/utils/websearch_retrievers.py +++ b/source/lambda/online/functions/functions_utils/retriever/utils/websearch_retrievers.py @@ -9,7 +9,7 @@ logger = logging.getLogger() logger.setLevel(logging.INFO) -from langchain.utilities import GoogleSearchAPIWrapper +from langchain_community.utilities import GoogleSearchAPIWrapper from langchain.callbacks.manager import CallbackManagerForRetrieverRun from langchain.docstore.document import Document from langchain.schema.retriever import BaseRetriever diff --git a/source/lambda/online/lambda_main/main_utils/online_entries/__init__.py b/source/lambda/online/lambda_main/main_utils/online_entries/__init__.py index 5f4c315ba..bca58edd7 100644 --- a/source/lambda/online/lambda_main/main_utils/online_entries/__init__.py +++ b/source/lambda/online/lambda_main/main_utils/online_entries/__init__.py @@ -1,14 +1,14 @@ from 
common_logic.common_utils.constant import EntryType -from functions import get_tool_by_name,init_common_tools,init_retail_tools +# from functions import get_tool_by_name,init_common_tools,init_retail_tools def get_common_entry(): from .common_entry import main_chain_entry - init_common_tools() + # init_common_tools() return main_chain_entry def get_retail_entry(): from .retail_entry import main_chain_entry - init_retail_tools() + # init_retail_tools() return main_chain_entry entry_map = { diff --git a/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py b/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py index 8a59c4379..21445ad12 100644 --- a/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py +++ b/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py @@ -1,5 +1,7 @@ import json -from typing import Annotated, Any, TypedDict +import traceback +from typing import Annotated, Any, TypedDict, List,Union +import copy from common_logic.common_utils.chatbot_utils import ChatbotManager from common_logic.common_utils.constant import ( @@ -7,7 +9,6 @@ IndexType, LLMTaskType, SceneType, - ToolRuningMode, ) from common_logic.common_utils.lambda_invoke_utils import ( invoke_lambda, @@ -15,20 +16,26 @@ node_monitor_wrapper, send_trace, ) +from langchain_core.messages import ToolMessage,AIMessage from common_logic.common_utils.logger_utils import get_logger from common_logic.common_utils.prompt_utils import get_prompt_templates_from_ddb from common_logic.common_utils.python_utils import add_messages, update_nest_dict from common_logic.common_utils.response_utils import process_response from common_logic.common_utils.serialization_utils import JSONEncoder -from functions import get_tool_by_name -from functions._tool_base import tool_manager -from functions.lambda_common_tools import rag -from lambda_main.main_utils.online_entries.agent_base import ( - build_agent_graph, - tool_execution, -) 
+from common_logic.langchain_integration.tools import ToolManager +from langchain_core.tools import BaseTool +from langchain_core.messages.tool import ToolCall +from langgraph.prebuilt.tool_node import ToolNode,TOOL_CALL_ERROR_TEMPLATE +from common_logic.langchain_integration.chains import LLMChain + + +# from lambda_main.main_utils.online_entries.agent_base import ( +# build_agent_graph, +# tool_execution, +# ) from lambda_main.main_utils.parse_config import CommonConfigParser from langgraph.graph import END, StateGraph +from common_logic.langchain_integration.langgraph_integration import set_currrent_app logger = get_logger("common_entry") @@ -84,23 +91,27 @@ class ChatbotState(TypedDict): ########### agent states ########### # current output of agent - agent_current_output: dict - # record messages during agent tool choose and calling, including agent message, tool ouput and error messages - agent_tool_history: Annotated[list[dict], add_messages] - # the maximum number that agent node can be called - agent_repeated_call_limit: int - # the current call time of agent - agent_current_call_number: int # - # whehter the current call time is less than maximum number of agent call - agent_repeated_call_validation: bool - # function calling - # whether the output of agent can be parsed as the valid tool calling - function_calling_parse_ok: bool - # whether the current parsed tool calling is run once - function_calling_is_run_once: bool - # current tool calls - function_calling_parsed_tool_calls: list - current_agent_tools_def: list + # agent_current_output: dict + # # record messages during agent tool choose and calling, including agent message, tool ouput and error messages + agent_tool_history: Annotated[List[Union[AIMessage,ToolMessage]], add_messages] + # # the maximum number that agent node can be called + # agent_repeated_call_limit: int + # # the current call time of agent + # agent_current_call_number: int # + # # whehter the current call time is less than 
maximum number of agent call + # agent_repeated_call_validation: bool + # # function calling + # # whether the output of agent can be parsed as the valid tool calling + # function_calling_parse_ok: bool + # # whether the current parsed tool calling is run once + exit_tool_calling: bool + # # current tool calls + # function_calling_parsed_tool_calls: list + # current_agent_tools_def: list + last_tool_messages: List[ToolMessage] + tools: List[BaseTool] + # the global rag tool use all knowledge + all_knowledge_rag_tool: BaseTool def is_null_or_empty(value): @@ -218,38 +229,57 @@ def agent(state: ChatbotState): # 2. for the first time, agent decides to give final results # deal with once tool calling - if ( - state["agent_repeated_call_validation"] - and state["function_calling_parse_ok"] - and state["agent_tool_history"] - ): - tool_execute_res = state["agent_tool_history"][-1]["additional_kwargs"][ - "raw_tool_call_results" - ][0] - tool_name = tool_execute_res["name"] - output = tool_execute_res["output"] - tool = get_tool_by_name(tool_name, scene=SceneType.COMMON) - if tool.running_mode == ToolRuningMode.ONCE: + last_tool_messages = state["last_tool_messages"] + if last_tool_messages and len(last_tool_messages) == 1: + last_tool_message = last_tool_messages[0] + tool:BaseTool = ToolManager.get_tool( + scene=SceneType.COMMON, + name=last_tool_message.name + ) + if tool.return_direct: send_trace("once tool", enable_trace=state["enable_trace"]) - return {"answer": output["result"], "function_calling_is_run_once": True} + return {"answer": last_tool_message.content, "exit_tool_calling": True} + + # tool_execute_res = last_tool_calls_results[-1].additional_kwargs[ + # "raw_tool_call_results" + # ][0] + # tool_name = tool_execute_res["name"] + # output = tool_execute_res["output"] + # tool = get_tool_by_name(tool_name, scene=SceneType.COMMON) + # if tool.running_mode == ToolRuningMode.ONCE: + # send_trace("once tool", enable_trace=state["enable_trace"]) + # return 
{"answer": output["result"], "exit_tool_calling": True} + + + + # if state["agent_tool_history"] and state["agent_tool_history"][-1].type=="tool_call": + # tool_execute_res = state["agent_tool_history"][-1]["additional_kwargs"][ + # "raw_tool_call_results" + # ][0] + # tool_name = tool_execute_res["name"] + # output = tool_execute_res["output"] + # tool = get_tool_by_name(tool_name, scene=SceneType.COMMON) + # if tool.running_mode == ToolRuningMode.ONCE: + # send_trace("once tool", enable_trace=state["enable_trace"]) + # return {"answer": output["result"], "exit_tool_calling": True} no_intention_condition = not state["intent_fewshot_examples"] - first_tool_final_response = False - if ( - (state["agent_current_call_number"] == 1) - and state["function_calling_parse_ok"] - and state["agent_tool_history"] - ): - tool_execute_res = state["agent_tool_history"][-1]["additional_kwargs"][ - "raw_tool_call_results" - ][0] - tool_name = tool_execute_res["name"] - if tool_name == "give_final_response": - first_tool_final_response = True + # first_tool_final_response = False + # if ( + # (state["agent_current_call_number"] == 1) + # and state["function_calling_parse_ok"] + # and state["agent_tool_history"] + # ): + # tool_execute_res = state["agent_tool_history"][-1]["additional_kwargs"][ + # "raw_tool_call_results" + # ][0] + # tool_name = tool_execute_res["name"] + # if tool_name == "give_final_response": + # first_tool_final_response = True if ( no_intention_condition - or first_tool_final_response + # or first_tool_final_response or state["chatbot_config"]["agent_config"]["only_use_rag_tool"] ): if state["chatbot_config"]["agent_config"]["only_use_rag_tool"]: @@ -259,28 +289,67 @@ def agent(state: ChatbotState): "no_intention_condition, switch to rag tool", enable_trace=state["enable_trace"], ) - elif first_tool_final_response: - send_trace( - "first tool is final response, switch to rag tool", - enable_trace=state["enable_trace"], + + all_knowledge_rag_tool = 
state['all_knowledge_rag_tool'] + return AIMessage(content="",tool_calls=[ + ToolCall( + name=all_knowledge_rag_tool.name, + args={} ) + ]) + + # normal call + agent_config = state["chatbot_config"]['agent_config'] + + tools_name = list(set(state['intent_fewshot_tools'] + agent_config['tools'])) + # get tools from tool names + tools = [ + ToolManager.get_tool( + scene=SceneType.COMMON, + name=name + ) + for name in tools_name + ] + llm_config = { + **agent_config['llm_config'], + "tools": tools, + "fewshot_examples": state['intent_fewshot_examples'], + } + group_name = state['chatbot_config']['group_name'] + chatbot_id = state['chatbot_config']['chatbot_id'] + prompt_templates_from_ddb = get_prompt_templates_from_ddb( + group_name, + model_id = llm_config['model_id'], + task_type=LLMTaskType.TOOL_CALLING_API, + chatbot_id=chatbot_id + ) + llm_config.update(**prompt_templates_from_ddb) - return { - "function_calling_parse_ok": True, - "agent_repeated_call_validation": True, - "function_calling_parsed_tool_calls": [ - { - "name": "rag_tool", - "kwargs": {}, - "model_id": state["chatbot_config"]["agent_config"]["llm_config"][ - "model_id" - ], - } - ], - } - response = app_agent.invoke(state) + tool_calling_chain = LLMChain.get_chain( + intent_type=LLMTaskType.TOOL_CALLING_API, + scene=SceneType.COMMON, + **llm_config + ) + + + # print(state['chat_history'] + state['agent_tool_history']) + agent_message:AIMessage = tool_calling_chain.invoke({ + "query":state['query'], + "chat_history":state['chat_history'], + "agent_tool_history":state['agent_tool_history'] + }) + + + send_trace( + # f"\n\n**agent_current_output:** \n{agent_message}\n\n **agent_current_call_number:** {agent_current_call_number}", + f"\n\n**agent_current_output:** \n{agent_message}\n\n", + state["stream"], + state["ws_connection_id"] + ) + if not agent_message.tool_calls: + return {"answer": agent_message.content, "exit_tool_calling": True} - return response + return 
{"agent_tool_history":[agent_message],"tools":tools} @node_monitor_wrapper @@ -314,6 +383,78 @@ def llm_direct_results_generation(state: ChatbotState): return {"answer": answer} +@node_monitor_wrapper +def tool_execution(state): + """executor lambda + Args: + state (NestUpdateState): _description_ + + Returns: + _type_: _description_ + """ + tools:List[BaseTool] = state['tools'] + + + def handle_tool_errors(e): + content = TOOL_CALL_ERROR_TEMPLATE.format(error=repr(e)) + logger.error(f"Tool execution error:\n{traceback.format_exc()}") + return content + + tool_node = ToolNode( + tools, + handle_tool_errors=handle_tool_errors + ) + last_agent_message:AIMessage = state["agent_tool_history"][-1] + + # print(last_agent_message) + # pass state to tools if needed + # tools_map = {tool.name:tool for tool in tools} + tool_calls = last_agent_message.tool_calls + # tool_calls:List[ToolCall] = copy.deepcopy(last_agent_message.tool_calls) + + # for tool_call in tool_calls: + # tool = tools_map[tool_call['name']] + # if tool.pass_state: + # tool_call['args'].update({tool.pass_state_name:state}) + + tool_messages:List[ToolMessage] = tool_node.invoke( + [AIMessage(content="",tool_calls=tool_calls)] + ) + + print("tool result",tool_messages[0].content) + + # tool_calls = state['function_calling_parsed_tool_calls'] + # assert len(tool_calls) == 1, tool_calls + # tool_call_results = [] + # for tool_call in tool_calls: + # tool_name = tool_call["name"] + # tool_kwargs = tool_call['kwargs'] + # # call tool + # output = invoke_lambda( + # event_body = { + # "tool_name":tool_name, + # "state":state, + # "kwargs":tool_kwargs + # }, + # lambda_name="Online_Tool_Execute", + # lambda_module_path="functions.lambda_tool", + # handler_name="lambda_handler" + # ) + # tool_call_results.append({ + # "name": tool_name, + # "output": output, + # "kwargs": tool_call['kwargs'], + # "model_id": tool_call['model_id'] + # }) + + # output = 
format_tool_call_results(tool_call['model_id'],tool_call_results) + send_trace(f'**tool_execute_res:** \n{tool_messages}', enable_trace=state["enable_trace"]) + return { + "agent_tool_history": tool_messages, + "last_tool_messages": tool_messages + } + + def final_results_preparation(state: ChatbotState): app_response = process_response(state["event_body"], state) return {"app_response": app_response} @@ -337,18 +478,17 @@ def intent_route(state: dict): def agent_route(state: dict): - if state.get("function_calling_is_run_once", False): + if state.get("exit_tool_calling", False): return "no need tool calling" + # state["agent_repeated_call_validation"] = ( + # state["agent_current_call_number"] < state["agent_repeated_call_limit"] + # ) + # if state["agent_repeated_call_validation"]: - state["agent_repeated_call_validation"] = ( - state["agent_current_call_number"] < state["agent_repeated_call_limit"] - ) - - if state["agent_repeated_call_validation"]: - return "valid tool calling" - else: - # TODO give final strategy - raise RuntimeError + return "valid tool calling" + # else: + # # TODO give final strategy + # raise RuntimeError ############################# @@ -358,6 +498,7 @@ def agent_route(state: dict): def build_graph(chatbot_state_cls): workflow = StateGraph(chatbot_state_cls) + # add node for all chat/rag/agent mode workflow.add_node("query_preprocess", query_preprocess) # chat mode @@ -430,30 +571,29 @@ def build_graph(chatbot_state_cls): ##################################### # define online sub-graph for agent # ##################################### -app_agent = None +# app_agent = None app = None -def register_rag_tool( - name: str, - description: str, - scene=SceneType.COMMON, - lambda_name: str = "lambda_common_tools", -): - tool_manager.register_tool( - { - "name": name, - "scene": scene, - "lambda_name": lambda_name, - "lambda_module_path": rag.lambda_handler, - "tool_def": { - "name": name, - "description": description, - }, - "running_mode": 
ToolRuningMode.ONCE, - } - ) - +# def register_rag_tool( +# name: str, +# description: str, +# scene=SceneType.COMMON, +# lambda_name: str = "lambda_common_tools", +# ): +# tool_manager.register_tool( +# { +# "name": name, +# "scene": scene, +# "lambda_name": lambda_name, +# "lambda_module_path": rag.lambda_handler, +# "tool_def": { +# "name": name, +# "description": description, +# }, +# "running_mode": ToolRuningMode.ONCE, +# } +# ) def register_rag_tool_from_config(event_body: dict): group_name = event_body.get("chatbot_config").get("group_name", "Admin") @@ -461,13 +601,36 @@ def register_rag_tool_from_config(event_body: dict): chatbot_manager = ChatbotManager.from_environ() chatbot = chatbot_manager.get_chatbot(group_name, chatbot_id) logger.info(chatbot) + registered_tool_names = [] for index_type, item_dict in chatbot.index_ids.items(): if index_type != IndexType.INTENTION: for index_content in item_dict["value"].values(): if "indexId" in index_content and "description" in index_content: - register_rag_tool( - index_content["indexId"], index_content["description"] + # Find retriever contain index_id + retrievers = event_body["chatbot_config"]["private_knowledge_config"]['retrievers'] + retriever = None + for retriever in retrievers: + if retriever["index_name"] == index_content["indexId"]: + break + assert retriever is not None,retrievers + reranks = event_body["chatbot_config"]["private_knowledge_config"]['reranks'] + index_name = index_content["indexId"] + # TODO give specific retriever config + ToolManager.register_common_rag_tool( + retriever_config={ + "retrievers":[retriever], + "reranks":[reranks[0]], + "llm_config": event_body["chatbot_config"]["private_knowledge_config"]['llm_config'] + }, + # event_body["chatbot_config"]["private_knowledge_config"], + name=index_name, + scene=SceneType.COMMON, + description=index_content["description"], + # pass_state=True, + # pass_state_name='state' ) + registered_tool_names.append(index_name) + return 
registered_tool_names def common_entry(event_body): @@ -476,20 +639,20 @@ def common_entry(event_body): :param event_body: The event body for lambda function. return: answer(str) """ - global app, app_agent + global app if app is None: app = build_graph(ChatbotState) - if app_agent is None: - app_agent = build_agent_graph(ChatbotState) + # if app_agent is None: + # app_agent = build_agent_graph(ChatbotState) # debuging if is_running_local(): with open("common_entry_workflow.png", "wb") as f: f.write(app.get_graph().draw_mermaid_png()) - with open("common_entry_agent_workflow.png", "wb") as f: - f.write(app_agent.get_graph().draw_mermaid_png()) + # with open("common_entry_agent_workflow.png", "wb") as f: + # f.write(app_agent.get_graph().draw_mermaid_png()) ################################################################################ # prepare inputs and invoke graph @@ -505,7 +668,26 @@ def common_entry(event_body): message_id = event_body["custom_message_id"] ws_connection_id = event_body["ws_connection_id"] enable_trace = chatbot_config["enable_trace"] - register_rag_tool_from_config(event_body) + agent_config = event_body["chatbot_config"]["agent_config"] + + # register as rag tool for each aos index + registered_tool_names = register_rag_tool_from_config(event_body) + # update private knowledge tool to agent config + for registered_tool_name in registered_tool_names: + if registered_tool_name not in agent_config['tools']: + agent_config['tools'].append(registered_tool_name) + + # define all knowledge rag tool + print('private_knowledge_config',event_body["chatbot_config"]["private_knowledge_config"]) + + all_knowledge_rag_tool = ToolManager.register_common_rag_tool( + retriever_config=event_body["chatbot_config"]["private_knowledge_config"], + name="all_knowledge_rag_tool", + scene=SceneType.COMMON, + description="all knowledge rag tool", + # pass_state=True, + # pass_state_name='state' + ) # invoke graph and get results response = app.invoke( @@ -523,10 
+705,14 @@ def common_entry(event_body): "debug_infos": {}, "extra_response": {}, "qq_match_results": [], - "agent_repeated_call_limit": chatbot_config["agent_repeated_call_limit"], - "agent_current_call_number": 0, - "ddb_additional_kwargs": {}, - } + "last_tool_messages":None, + "all_knowledge_rag_tool":all_knowledge_rag_tool, + "tools":None, + # "agent_repeated_call_limit": chatbot_config["agent_repeated_call_limit"], + # "agent_current_call_number": 0, + "ddb_additional_kwargs": {} + }, + config={"recursion_limit": 10} ) return response["app_response"] diff --git a/source/lambda/online/lambda_main/main_utils/online_entries/common_entry_v2.py b/source/lambda/online/lambda_main/main_utils/online_entries/common_entry_v2.py deleted file mode 100644 index 5e4814de9..000000000 --- a/source/lambda/online/lambda_main/main_utils/online_entries/common_entry_v2.py +++ /dev/null @@ -1,700 +0,0 @@ -import json -from typing import Annotated, Any, TypedDict, List -import copy - -from common_logic.common_utils.chatbot_utils import ChatbotManager -from common_logic.common_utils.constant import ( - ChatbotMode, - IndexType, - LLMTaskType, - SceneType, - ToolRuningMode, -) -from common_logic.common_utils.lambda_invoke_utils import ( - invoke_lambda, - is_running_local, - node_monitor_wrapper, - send_trace, -) -from langchain_core.messages import ToolMessage,AIMessage -from common_logic.common_utils.logger_utils import get_logger -from common_logic.common_utils.prompt_utils import get_prompt_templates_from_ddb -from common_logic.common_utils.python_utils import add_messages, update_nest_dict -from common_logic.common_utils.response_utils import process_response -from common_logic.common_utils.serialization_utils import JSONEncoder -from langchain_integration.tools import ToolManager -from langchain_core.tools import BaseTool -from langchain_core.messages.tool import ToolCall -from langgraph.prebuilt import ToolNode -from langchain_integration.chains import LLMChain - - -# from 
lambda_main.main_utils.online_entries.agent_base import ( -# build_agent_graph, -# tool_execution, -# ) -from lambda_main.main_utils.parse_config import CommonConfigParser -from langgraph.graph import END, StateGraph -from langchain_integration.langgraph_integration import set_currrent_app - -logger = get_logger("common_entry") - - -class ChatbotState(TypedDict): - ########### input/output states ########### - # inputs - # origin event body - event_body: dict - # origianl input question - query: str - # chat history between human and agent - chat_history: Annotated[list[dict], add_messages] - # complete chatbot config, consumed by all the nodes - chatbot_config: dict - # websocket connection id for the agent - ws_connection_id: str - # whether to enbale stream output via ws_connection_id - stream: bool - # message id related to original input question - message_id: str = None - # record running states of different nodes - trace_infos: Annotated[list[str], add_messages] - # whether to enbale trace info update via streaming ouput - enable_trace: bool - # outputs - # final answer generated by whole app graph - answer: Any - # information needed return to user, e.g. intention, context, figure and so on, anything you can get during execution - extra_response: Annotated[dict, update_nest_dict] - # addition kwargs which need to save into ddb - ddb_additional_kwargs: dict - # response of entire app - app_response: Any - - ########### query rewrite states ########### - # query rewrite results - query_rewrite: str = None - - ########### intention detection states ########### - # intention type of retrieved intention samples in search engine, e.g. OpenSearch - intent_type: str = None - # retrieved intention samples in search engine, e.g. OpenSearch - intent_fewshot_examples: list - # tools of retrieved intention samples in search engine, e.g. 
OpenSearch - intent_fewshot_tools: list - - ########### retriever states ########### - # contexts information retrieved in search engine, e.g. OpenSearch - qq_match_results: list = [] - contexts: str = None - figure: list = None - - ########### agent states ########### - # current output of agent - # agent_current_output: dict - # # record messages during agent tool choose and calling, including agent message, tool ouput and error messages - agent_tool_history: Annotated[List[AIMessage | ToolMessage], add_messages] - # # the maximum number that agent node can be called - # agent_repeated_call_limit: int - # # the current call time of agent - # agent_current_call_number: int # - # # whehter the current call time is less than maximum number of agent call - # agent_repeated_call_validation: bool - # # function calling - # # whether the output of agent can be parsed as the valid tool calling - # function_calling_parse_ok: bool - # # whether the current parsed tool calling is run once - tool_calling_is_run_once: bool - # # current tool calls - # function_calling_parsed_tool_calls: list - # current_agent_tools_def: list - last_tool_messages: List[ToolMessage] - tools: List[BaseTool] - # the global rag tool use all knowledge - all_knowledge_rag_tool: BaseTool - - -def is_null_or_empty(value): - if value is None: - return True - elif isinstance(value, (dict, list, str)) and not value: - return True - return False - - -def format_intention_output(data): - if is_null_or_empty(data): - return "" - - markdown_table = "| Query | Score | Name | Intent | Additional Info |\n" - markdown_table += "|----------------------|-------|------------|-------------|----------------------|\n" - for item in data: - query = item.get("query", "") - score = item.get("score", "") - name = item.get("name", "") - intent = item.get("intent", "") - kwargs = ', '.join([f'{k}: {v}' for k, v in item.get('kwargs', {}).items()]) - markdown_table += f"| {query} | {score} | {name} | {intent} | {kwargs} |\n" 
- logger.info(markdown_table) - - return markdown_table - -#################### -# nodes in graph # -#################### - - -@node_monitor_wrapper -def query_preprocess(state: ChatbotState): - output: str = invoke_lambda( - event_body=state, - lambda_name="Online_Query_Preprocess", - lambda_module_path="lambda_query_preprocess.query_preprocess", - handler_name="lambda_handler", - ) - - send_trace(f"\n**query rewrite:** {output}\n**origin query:** {state['query']}") - return {"query_rewrite": output} - - -@node_monitor_wrapper -def intention_detection(state: ChatbotState): - # if state['chatbot_config']['agent_config']['only_use_rag_tool']: - # return { - # "intent_type": "intention detected" - # } - retriever_params = state["chatbot_config"]["qq_match_config"] - retriever_params["query"] = state[ - retriever_params.get("retriever_config", {}).get("query_key", "query") - ] - output: str = invoke_lambda( - event_body=retriever_params, - lambda_name="Online_Functions", - lambda_module_path="functions.functions_utils.retriever.retriever", - handler_name="lambda_handler", - ) - context_list = [] - qq_match_threshold = retriever_params["threshold"] - for doc in output["result"]["docs"]: - if doc["retrieval_score"] > qq_match_threshold: - send_trace( - f"\n\n**similar query found**\n{doc}", - state["stream"], - state["ws_connection_id"], - state["enable_trace"], - ) - query_content = doc["answer"] - # query_content = doc['answer']['jsonlAnswer'] - return { - "answer": query_content, - "intent_type": "similar query found", - } - question = doc["question"] - answer = doc["answer"] - context_list.append(f"问题: {question}, \n答案:{answer}") - - if state["chatbot_config"]["agent_config"]["only_use_rag_tool"]: - return {"qq_match_results": context_list, "intent_type": "intention detected"} - - intent_fewshot_examples = invoke_lambda( - lambda_module_path="lambda_intention_detection.intention", - lambda_name="Online_Intention_Detection", - handler_name="lambda_handler", - 
event_body=state, - ) - - intent_fewshot_tools: list[str] = list( - set([e["intent"] for e in intent_fewshot_examples]) - ) - - markdown_table = format_intention_output(intent_fewshot_examples) - send_trace( - f"**intention retrieved:**\n\n {markdown_table}", - state["stream"], - state["ws_connection_id"], - state["enable_trace"], - ) - return { - "intent_fewshot_examples": intent_fewshot_examples, - "intent_fewshot_tools": intent_fewshot_tools, - "qq_match_results": context_list, - "intent_type": "intention detected", - } - - -@node_monitor_wrapper -def agent(state: ChatbotState): - # two cases to invoke rag function - # 1. when valid intention fewshot found - # 2. for the first time, agent decides to give final results - - # deal with once tool calling - last_tool_messages = state["last_tool_messages"] - if last_tool_messages and len(last_tool_messages) == 1: - last_tool_message = last_tool_messages[0] - tool:BaseTool = ToolManager.get_tool( - scene=SceneType.COMMON, - name=last_tool_message.name - ) - if tool.return_direct: - send_trace("once tool", enable_trace=state["enable_trace"]) - return {"answer": last_tool_message.content, "tool_calling_is_run_once": True} - - # tool_execute_res = last_tool_calls_results[-1].additional_kwargs[ - # "raw_tool_call_results" - # ][0] - # tool_name = tool_execute_res["name"] - # output = tool_execute_res["output"] - # tool = get_tool_by_name(tool_name, scene=SceneType.COMMON) - # if tool.running_mode == ToolRuningMode.ONCE: - # send_trace("once tool", enable_trace=state["enable_trace"]) - # return {"answer": output["result"], "tool_calling_is_run_once": True} - - - - # if state["agent_tool_history"] and state["agent_tool_history"][-1].type=="tool_call": - # tool_execute_res = state["agent_tool_history"][-1]["additional_kwargs"][ - # "raw_tool_call_results" - # ][0] - # tool_name = tool_execute_res["name"] - # output = tool_execute_res["output"] - # tool = get_tool_by_name(tool_name, scene=SceneType.COMMON) - # if 
tool.running_mode == ToolRuningMode.ONCE: - # send_trace("once tool", enable_trace=state["enable_trace"]) - # return {"answer": output["result"], "tool_calling_is_run_once": True} - - no_intention_condition = not state["intent_fewshot_examples"] - # first_tool_final_response = False - # if ( - # (state["agent_current_call_number"] == 1) - # and state["function_calling_parse_ok"] - # and state["agent_tool_history"] - # ): - # tool_execute_res = state["agent_tool_history"][-1]["additional_kwargs"][ - # "raw_tool_call_results" - # ][0] - # tool_name = tool_execute_res["name"] - # if tool_name == "give_final_response": - # first_tool_final_response = True - - if ( - no_intention_condition - # or first_tool_final_response - or state["chatbot_config"]["agent_config"]["only_use_rag_tool"] - ): - if state["chatbot_config"]["agent_config"]["only_use_rag_tool"]: - send_trace("agent only use rag tool", enable_trace=state["enable_trace"]) - elif no_intention_condition: - send_trace( - "no_intention_condition, switch to rag tool", - enable_trace=state["enable_trace"], - ) - # elif first_tool_final_response: - # send_trace( - # "first tool is final response, switch to rag tool", - # enable_trace=state["enable_trace"], - # ) - - all_knowledge_rag_tool = state['all_knowledge_rag_tool'] - return AIMessage(content="",tool_calls=[ - ToolCall( - name=all_knowledge_rag_tool.name, - args={} - ) - ]) - - # response = app_agent.invoke(state) - - # normal call - agent_config = state["chatbot_config"]['agent_config'] - tools_name = state['intent_fewshot_tools'] + agent_config['tools'] - # get tools from tool names - tools = [ - ToolManager.get_tool( - scene=SceneType.COMMON, - name=name - ) - for name in tools_name - ] - llm_config = { - **agent_config['llm_config'], - "tools": tools, - "fewshot_examples": state['intent_fewshot_examples'], - } - group_name = state['chatbot_config']['group_name'] - chatbot_id = state['chatbot_config']['chatbot_id'] - prompt_templates_from_ddb = 
get_prompt_templates_from_ddb( - group_name, - model_id = llm_config['model_id'], - task_type=LLMTaskType.TOOL_CALLING_API, - chatbot_id=chatbot_id - ) - llm_config.update(**prompt_templates_from_ddb) - - tool_calling_chain = LLMChain.get_chain( - intent_type=LLMTaskType.TOOL_CALLING_API, - scene=SceneType.COMMON, - **llm_config - ) - agent_message:AIMessage = tool_calling_chain.invoke(**state) - send_trace( - # f"\n\n**agent_current_output:** \n{agent_message}\n\n **agent_current_call_number:** {agent_current_call_number}", - f"\n\n**agent_current_output:** \n{agent_message}\n\n", - state["stream"], - state["ws_connection_id"] - ) - - return {"agent_tool_history":[agent_message],"tools":tools} - - -@node_monitor_wrapper -def llm_direct_results_generation(state: ChatbotState): - group_name = state["chatbot_config"]["group_name"] - llm_config = state["chatbot_config"]["chat_config"] - task_type = LLMTaskType.CHAT - - prompt_templates_from_ddb = get_prompt_templates_from_ddb( - group_name, model_id=llm_config["model_id"], task_type=task_type - ) - logger.info(prompt_templates_from_ddb) - - answer: dict = invoke_lambda( - event_body={ - "llm_config": { - **llm_config, - "stream": state["stream"], - "intent_type": task_type, - **prompt_templates_from_ddb, - }, - "llm_input": { - "query": state["query"], - "chat_history": state["chat_history"], - }, - }, - lambda_name="Online_LLM_Generate", - lambda_module_path="lambda_llm_generate.llm_generate", - handler_name="lambda_handler", - ) - return {"answer": answer} - - -@node_monitor_wrapper -def tool_execution(state): - """executor lambda - Args: - state (NestUpdateState): _description_ - - Returns: - _type_: _description_ - """ - tools:List[BaseTool] = state['tools'] - tool_node = ToolNode(tools) - last_agent_message:AIMessage = state["agent_tool_history"][-1] - - # pass state to tools if needed - tools_map = {tool.name:tool for tool in tools} - tool_calls:List[ToolCall] = copy.deepcopy(last_agent_message.tool_calls) - - 
for tool_call in tool_calls: - tool = tools_map[tool_call.name] - if tool.pass_state: - tool_call.args.update({tool.pass_state_name:state}) - - tool_messages:List[ToolMessage] = tool_node.invoke( - [AIMessage(content="",tool_calls=tool_calls)] - ) - - # tool_calls = state['function_calling_parsed_tool_calls'] - # assert len(tool_calls) == 1, tool_calls - # tool_call_results = [] - # for tool_call in tool_calls: - # tool_name = tool_call["name"] - # tool_kwargs = tool_call['kwargs'] - # # call tool - # output = invoke_lambda( - # event_body = { - # "tool_name":tool_name, - # "state":state, - # "kwargs":tool_kwargs - # }, - # lambda_name="Online_Tool_Execute", - # lambda_module_path="functions.lambda_tool", - # handler_name="lambda_handler" - # ) - # tool_call_results.append({ - # "name": tool_name, - # "output": output, - # "kwargs": tool_call['kwargs'], - # "model_id": tool_call['model_id'] - # }) - - # output = format_tool_call_results(tool_call['model_id'],tool_call_results) - send_trace(f'**tool_execute_res:** \n{tool_messages}', enable_trace=state["enable_trace"]) - return { - "agent_tool_history": tool_messages, - "last_tool_messages": tool_messages - } - - -def final_results_preparation(state: ChatbotState): - app_response = process_response(state["event_body"], state) - return {"app_response": app_response} - - -def matched_query_return(state: ChatbotState): - return {"answer": state["answer"]} - - -################ -# define edges # -################ - - -def query_route(state: dict): - return f"{state['chatbot_config']['chatbot_mode']} mode" - - -def intent_route(state: dict): - return state["intent_type"] - - -def agent_route(state: dict): - if state.get("tool_calling_is_run_once", False): - return "no need tool calling" - - # state["agent_repeated_call_validation"] = ( - # state["agent_current_call_number"] < state["agent_repeated_call_limit"] - # ) - - if state["agent_repeated_call_validation"]: - return "valid tool calling" - else: - # TODO give final 
strategy - raise RuntimeError - - -############################# -# define online top-level graph for app # -############################# - - -def build_graph(chatbot_state_cls): - workflow = StateGraph(chatbot_state_cls) - - # add node for all chat/rag/agent mode - workflow.add_node("query_preprocess", query_preprocess) - # chat mode - workflow.add_node("llm_direct_results_generation", llm_direct_results_generation) - # rag mode - # workflow.add_node("knowledge_retrieve", knowledge_retrieve) - # workflow.add_node("llm_rag_results_generation", llm_rag_results_generation) - # agent mode - workflow.add_node("intention_detection", intention_detection) - workflow.add_node("matched_query_return", matched_query_return) - # agent sub graph - workflow.add_node("agent", agent) - workflow.add_node("tools_execution", tool_execution) - workflow.add_node("final_results_preparation", final_results_preparation) - - # add all edges - workflow.set_entry_point("query_preprocess") - # chat mode - workflow.add_edge("llm_direct_results_generation", "final_results_preparation") - # rag mode - # workflow.add_edge("knowledge_retrieve", "llm_rag_results_generation") - # workflow.add_edge("llm_rag_results_generation", END) - # agent mode - workflow.add_edge("tools_execution", "agent") - workflow.add_edge("matched_query_return", "final_results_preparation") - workflow.add_edge("final_results_preparation", END) - - # add conditional edges - # choose running mode based on user selection: - # 1. chat mode: let llm generate results directly - # 2. rag mode: retrive all knowledge and let llm generate results - # 3. agent mode: let llm generate results based on intention detection, tool calling and retrieved knowledge - workflow.add_conditional_edges( - "query_preprocess", - query_route, - { - "chat mode": "llm_direct_results_generation", - "agent mode": "intention_detection", - }, - ) - - # three running branch will be chosen based on intention detection results: - # 1. 
similar query found: if very similar queries were found in knowledge base, these queries will be given as results - # 2. intention detected: if intention detected, the agent logic will be invoked - workflow.add_conditional_edges( - "intention_detection", - intent_route, - { - "similar query found": "matched_query_return", - "intention detected": "agent", - }, - ) - - # the results of agent planning will be evaluated and decide next step: - # 1. valid tool calling: the agent chooses the valid tools, and the tools will be executed - # 2. no need tool calling: the agent thinks no tool needs to be called, the final results can be generated - workflow.add_conditional_edges( - "agent", - agent_route, - { - "valid tool calling": "tools_execution", - "no need tool calling": "final_results_preparation", - }, - ) - - app = workflow.compile() - return app - - -##################################### -# define online sub-graph for agent # -##################################### -# app_agent = None -app = None - - -# def register_rag_tool( -# name: str, -# description: str, -# scene=SceneType.COMMON, -# lambda_name: str = "lambda_common_tools", -# ): -# tool_manager.register_tool( -# { -# "name": name, -# "scene": scene, -# "lambda_name": lambda_name, -# "lambda_module_path": rag.lambda_handler, -# "tool_def": { -# "name": name, -# "description": description, -# }, -# "running_mode": ToolRuningMode.ONCE, -# } -# ) - -def register_rag_tool_from_config(event_body: dict): - group_name = event_body.get("chatbot_config").get("group_name", "Admin") - chatbot_id = event_body.get("chatbot_config").get("chatbot_id", "admin") - chatbot_manager = ChatbotManager.from_environ() - chatbot = chatbot_manager.get_chatbot(group_name, chatbot_id) - logger.info(chatbot) - registered_tool_names = [] - for index_type, item_dict in chatbot.index_ids.items(): - if index_type != IndexType.INTENTION: - for index_content in item_dict["value"].values(): - if "indexId" in index_content and "description" in 
index_content: - # Find retriever contain index_id - retrievers = event_body["chatbot_config"]["private_knowledge_config"]['retrievers'] - retriever = None - for retriever in retrievers: - if retriever["index_name"] == index_content["indexId"]: - break - assert retriever is not None,retrievers - reranks = event_body["chatbot_config"]["private_knowledge_config"]['reranks'] - index_name = index_content["indexId"] - # TODO give specific retriever config - ToolManager.register_common_rag_tool( - retriever_config={ - "retrievers":[retriever], - "reranks":[reranks[0]], - "llm_config": event_body["chatbot_config"]["private_knowledge_config"]['llm_config'] - }, - # event_body["chatbot_config"]["private_knowledge_config"], - name=index_name, - scene=SceneType.COMMON, - description=index_content["description"], - pass_state=True, - pass_state_name='state' - ) - registered_tool_names.append(index_name) - return registered_tool_names - - -def common_entry(event_body): - """ - Entry point for the Lambda function. - :param event_body: The event body for lambda function. 
- return: answer(str) - """ - global app - if app is None: - app = build_graph(ChatbotState) - - # if app_agent is None: - # app_agent = build_agent_graph(ChatbotState) - - # debuging - if is_running_local(): - with open("common_entry_workflow.png", "wb") as f: - f.write(app.get_graph().draw_mermaid_png()) - - # with open("common_entry_agent_workflow.png", "wb") as f: - # f.write(app_agent.get_graph().draw_mermaid_png()) - - ################################################################################ - # prepare inputs and invoke graph - event_body["chatbot_config"] = CommonConfigParser.from_chatbot_config( - event_body["chatbot_config"] - ) - logger.info(event_body) - chatbot_config = event_body["chatbot_config"] - query = event_body["query"] - use_history = chatbot_config["use_history"] - chat_history = event_body["chat_history"] if use_history else [] - stream = event_body["stream"] - message_id = event_body["custom_message_id"] - ws_connection_id = event_body["ws_connection_id"] - enable_trace = chatbot_config["enable_trace"] - agent_config = event_body["chatbot_config"]["agent_config"] - - # register as rag tool for each aos index - registered_tool_names = register_rag_tool_from_config(event_body) - # update private knowledge tool to agent config - for registered_tool_name in registered_tool_names: - if registered_tool_name not in agent_config['tools']: - agent_config['tools'].append(registered_tool_name) - - # define all knowledge rag tool - all_knowledge_rag_tool = ToolManager.register_common_rag_tool( - retriever_config=event_body["chatbot_config"]["private_knowledge_config"], - name="all_knowledge_rag_tool", - scene=SceneType.COMMON, - description="all knowledge rag tool", - pass_state=True, - pass_state_name='state' - ) - - # invoke graph and get results - response = app.invoke( - { - "event_body": event_body, - "stream": stream, - "chatbot_config": chatbot_config, - "query": query, - "enable_trace": enable_trace, - "trace_infos": [], - "message_id": 
message_id, - "chat_history": chat_history, - "agent_tool_history": [], - "ws_connection_id": ws_connection_id, - "debug_infos": {}, - "extra_response": {}, - "qq_match_results": [], - "last_tool_calls_results":None, - "all_knowledge_rag_tool":all_knowledge_rag_tool, - "tools":None, - # "agent_repeated_call_limit": chatbot_config["agent_repeated_call_limit"], - # "agent_current_call_number": 0, - "ddb_additional_kwargs": {}, - - } - ) - return response["app_response"] - - -main_chain_entry = common_entry diff --git a/source/lambda/online/lambda_main/test/local_test_base.py b/source/lambda/online/lambda_main/test/local_test_base.py index 25e351cc8..8feef1ebe 100644 --- a/source/lambda/online/lambda_main/test/local_test_base.py +++ b/source/lambda/online/lambda_main/test/local_test_base.py @@ -13,7 +13,7 @@ from typing import Any import common_logic.common_utils.websocket_utils as websocket_utils from common_logic.common_utils.constant import LLMTaskType -from langchain_core.pydantic_v1 import BaseModel, Field, validator +from pydantic import BaseModel, Field class DummyWebSocket: def post_to_connection(self,ConnectionId,Data): diff --git a/source/lambda/online/lambda_main/test/main_local_test_common.py b/source/lambda/online/lambda_main/test/main_local_test_common.py index f058da3fe..67546ca1c 100644 --- a/source/lambda/online/lambda_main/test/main_local_test_common.py +++ b/source/lambda/online/lambda_main/test/main_local_test_common.py @@ -144,11 +144,33 @@ def test_multi_turns_agent_pr(): # "qq_match": [], # "private_knowledge": ['pr_test-qd-sso_poc'] # } + # user_queries = [{ + # "query": "今天天气怎么样", + # "use_history": True, + # "enable_trace": False + # }] + user_queries = [{ + # "query": "199乘以98等于多少", + "query": "1234乘以89878等于多少?", + "use_history": True, + "enable_trace": True + }] + default_index_names = { "intention":[], "qq_match": [], "private_knowledge": [] } + default_llm_config = { + # 'model_id': 'anthropic.claude-3-sonnet-20240229-v1:0', + # 
'model_id': "meta.llama3-1-70b-instruct-v1:0", + # 'model_id':"mistral.mistral-large-2407-v1:0", + 'model_id':"cohere.command-r-plus-v1:0", + 'model_kwargs': { + 'temperature': 0.1, + 'max_tokens': 4096 + } + } for query in user_queries: print("==" * 50) @@ -158,12 +180,14 @@ def test_multi_turns_agent_pr(): session_id=session_id, query=query['query'], use_history=query['use_history'], - chatbot_id="pr_test", - group_name='pr_test', + chatbot_id="admin", + group_name='admin', only_use_rag_tool=False, default_index_names=default_index_names, - enable_trace = query.get('enable_trace',True) - ) + enable_trace = query.get('enable_trace',True), + agent_config={"tools":["python_repl"]}, + default_llm_config=default_llm_config + ) print() @@ -200,8 +224,6 @@ def test_qq_case_from_hanxu(): - - def complete_test_pr(): print("start test in agent mode") test_multi_turns_agent_pr() @@ -409,10 +431,10 @@ def anta_test(): if __name__ == "__main__": # complete_test_pr() # test_multi_turns_rag_pr() - # test_multi_turns_agent_pr() + test_multi_turns_agent_pr() # test_qq_case_from_hanxu() # test_multi_turns_chat_pr() # bigo_test() # sso_batch_test() # anta_test() - bigo_test() + # bigo_test() From 6577e2664d0b7405ff03ebd9969ccf341b4d6be5 Mon Sep 17 00:00:00 2001 From: zhouxss Date: Sun, 3 Nov 2024 05:17:43 +0000 Subject: [PATCH 051/110] move retrievers to common_logic --- .../common_logic/common_utils/prompt_utils.py | 2 +- .../retrievers/retriever.py | 181 ++++ .../retrievers/utils/aos_retrievers.py | 840 ++++++++++++++++++ .../retrievers/utils/aos_utils.py | 217 +++++ .../retrievers/utils/context_utils.py | 78 ++ .../retrievers/utils/reranker.py | 217 +++++ .../retrievers/utils/test.py | 176 ++++ .../retrievers/utils/websearch_retrievers.py | 124 +++ .../tools/common_tools/rag.py | 15 +- .../functions/lambda_common_tools/rag.py | 2 + .../lambda_intention_detection/intention.py | 20 +- 11 files changed, 1855 insertions(+), 17 deletions(-) create mode 100644 
source/lambda/online/common_logic/langchain_integration/retrievers/retriever.py create mode 100644 source/lambda/online/common_logic/langchain_integration/retrievers/utils/aos_retrievers.py create mode 100644 source/lambda/online/common_logic/langchain_integration/retrievers/utils/aos_utils.py create mode 100644 source/lambda/online/common_logic/langchain_integration/retrievers/utils/context_utils.py create mode 100644 source/lambda/online/common_logic/langchain_integration/retrievers/utils/reranker.py create mode 100644 source/lambda/online/common_logic/langchain_integration/retrievers/utils/test.py create mode 100644 source/lambda/online/common_logic/langchain_integration/retrievers/utils/websearch_retrievers.py diff --git a/source/lambda/online/common_logic/common_utils/prompt_utils.py b/source/lambda/online/common_logic/common_utils/prompt_utils.py index 0ff72f404..03bbed15c 100644 --- a/source/lambda/online/common_logic/common_utils/prompt_utils.py +++ b/source/lambda/online/common_logic/common_utils/prompt_utils.py @@ -2,7 +2,7 @@ import os import json -from langchain.pydantic_v1 import BaseModel, Field +from pydantic import BaseModel, Field from collections import defaultdict from common_logic.common_utils.constant import LLMModelType, LLMTaskType import copy diff --git a/source/lambda/online/common_logic/langchain_integration/retrievers/retriever.py b/source/lambda/online/common_logic/langchain_integration/retrievers/retriever.py new file mode 100644 index 000000000..ba411211a --- /dev/null +++ b/source/lambda/online/common_logic/langchain_integration/retrievers/retriever.py @@ -0,0 +1,181 @@ +import json +import os +os.environ["PYTHONUNBUFFERED"] = "1" +import logging +import sys + +import boto3 +from common_logic.common_utils.chatbot_utils import ChatbotManager +from common_logic.common_utils.lambda_invoke_utils import chatbot_lambda_call_wrapper +from functions.functions_utils.retriever.utils.aos_retrievers import ( + QueryDocumentBM25Retriever, + 
QueryDocumentKNNRetriever, + QueryQuestionRetriever, +) +from functions.functions_utils.retriever.utils.context_utils import ( + retriever_results_format, +) +from functions.functions_utils.retriever.utils.reranker import ( + BGEReranker, + MergeReranker, +) +from functions.functions_utils.retriever.utils.websearch_retrievers import ( + GoogleRetriever, +) +from langchain.retrievers import ( + ContextualCompressionRetriever, +) +from langchain_community.retrievers import AmazonKnowledgeBasesRetriever +from langchain.retrievers.merger_retriever import MergerRetriever +from langchain.schema.runnable import RunnableLambda, RunnablePassthrough +from langchain_community.retrievers import AmazonKnowledgeBasesRetriever + +logger = logging.getLogger("retriever") +logger.setLevel(logging.INFO) + +SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) +sys.path.append(os.path.dirname(SCRIPT_DIR)) + +kb_enabled = os.environ["KNOWLEDGE_BASE_ENABLED"].lower() == "true" +kb_type = json.loads(os.environ["KNOWLEDGE_BASE_TYPE"]) +chatbot_table_name = os.environ.get("CHATBOT_TABLE", "") +model_table_name = os.environ.get("MODEL_TABLE", "") +index_table_name = os.environ.get("INDEX_TABLE", "") +dynamodb = boto3.resource("dynamodb") +chatbot_table = dynamodb.Table(chatbot_table_name) +model_table = dynamodb.Table(model_table_name) +index_table = dynamodb.Table(index_table_name) +chatbot_manager = ChatbotManager(chatbot_table, index_table, model_table) + +region = boto3.Session().region_name + +knowledgebase_client = boto3.client("bedrock-agent-runtime", region) +sm_client = boto3.client("sagemaker-runtime") + + +def get_bedrock_kb_retrievers(knowledge_base_id_list, top_k: int): + retriever_list = [ + AmazonKnowledgeBasesRetriever( + knowledge_base_id=knowledge_base_id, + retrieval_config={"vectorSearchConfiguration": {"numberOfResults": top_k}}, + ) + for knowledge_base_id in knowledge_base_id_list + ] + return retriever_list + + +def get_websearch_retrievers(top_k: int): + 
retriever_list = [GoogleRetriever(top_k)] + return retriever_list + + +def get_custom_qd_retrievers(config: dict, using_bm25=False): + qd_retriever = QueryDocumentKNNRetriever(**config) + + if using_bm25: + bm25_retrievert = QueryDocumentBM25Retriever( + **{ + "index_name": config["index_name"], + "using_whole_doc": config.get("using_whole_doc", False), + "context_num": config["context_num"], + "enable_debug": config.get("enable_debug", False), + } + ) + return [qd_retriever, bm25_retrievert] + return [qd_retriever] + + +def get_custom_qq_retrievers(config: dict): + qq_retriever = QueryQuestionRetriever(**config) + return [qq_retriever] + + +def get_whole_chain(retriever_list, reranker_config): + lotr = MergerRetriever(retrievers=retriever_list) + if len(reranker_config): + default_reranker_config = { + "enable_debug": False, + "target_model": "bge_reranker_model.tar.gz", + "top_k": 10, + } + reranker_config = {**default_reranker_config, **reranker_config} + compressor = BGEReranker(**reranker_config) + else: + compressor = MergeReranker() + + compression_retriever = ContextualCompressionRetriever( + base_compressor=compressor, base_retriever=lotr + ) + whole_chain = RunnablePassthrough.assign( + docs=compression_retriever | RunnableLambda(retriever_results_format) + ) + return whole_chain + + +retriever_dict = { + "qq": get_custom_qq_retrievers, + "intention": get_custom_qq_retrievers, + "qd": get_custom_qd_retrievers, + "websearch": get_websearch_retrievers, + "bedrock_kb": get_bedrock_kb_retrievers, +} + + +def get_custom_retrievers(retriever): + return retriever_dict[retriever["index_type"]](retriever) + + + + +def lambda_handler(event, context=None): + logger.info(f"Retrieval event: {event}") + event_body = event + retriever_list = [] + for retriever in event_body["retrievers"]: + if not kb_enabled: + retriever["vector_field"] = "sentence_vector" + retriever["source_field"] = "source" + retriever["text_field"] = "paragraph" + 
retriever_list.extend(get_custom_retrievers(retriever)) + rerankers = event_body.get("rerankers", None) + if rerankers: + reranker_config = rerankers[0]["config"] + else: + reranker_config = {} + + if len(retriever_list) > 0: + whole_chain = get_whole_chain(retriever_list, reranker_config) + else: + whole_chain = RunnablePassthrough.assign(docs=lambda x: []) + docs = whole_chain.invoke({"query": event_body["query"], "debug_info": {}}) + return {"code": 0, "result": docs} + + +if __name__ == "__main__": + query = """test""" + event = { + "retrievers": [ + { + "index_type": "qd", + "top_k": 5, + "context_num": 1, + "using_whole_doc": False, + "query_key": "query", + "index_name": "admin-qd-default", + "kb_type": "aos", + "target_model": "amazon.titan-embed-text-v1", + "embedding_model_endpoint": "amazon.titan-embed-text-v1", + "model_type": "bedrock", + "group_name": "Admin", + } + ], + "rerankers": [], + "llm_config": { + "model_id": "anthropic.claude-3-sonnet-20240229-v1:0", + "model_kwargs": {"temperature": 0.01, "max_tokens": 1000}, + "endpoint_name": "", + }, + "query": "亚马逊云计算服务可以通过超文本传输协议(HTTP)访问吗?", + } + response = lambda_handler(event, None) + print(response) diff --git a/source/lambda/online/common_logic/langchain_integration/retrievers/utils/aos_retrievers.py b/source/lambda/online/common_logic/langchain_integration/retrievers/utils/aos_retrievers.py new file mode 100644 index 000000000..5fb9ff4d5 --- /dev/null +++ b/source/lambda/online/common_logic/langchain_integration/retrievers/utils/aos_retrievers.py @@ -0,0 +1,840 @@ +import asyncio +import json +import logging +import os +import traceback +from typing import Any, Dict, List, Union + +import boto3 +from common_logic.common_utils.time_utils import timeit +from langchain.callbacks.manager import CallbackManagerForRetrieverRun +from langchain.docstore.document import Document +from langchain.schema.retriever import BaseRetriever +from langchain_community.embeddings import BedrockEmbeddings +from 
sm_utils import SagemakerEndpointVectorOrCross + +from .aos_utils import LLMBotOpenSearchClient + +logger = logging.getLogger() +logger.setLevel(logging.INFO) + +# region = os.environ["AWS_REGION"] +kb_enabled = os.environ["KNOWLEDGE_BASE_ENABLED"].lower() == "true" +kb_type = json.loads(os.environ["KNOWLEDGE_BASE_TYPE"]) +intelli_agent_kb_enabled = kb_type.get("intelliAgentKb", {}).get("enabled", False) +aos_endpoint = os.environ.get("AOS_ENDPOINT", "") +aos_domain_name = os.environ.get("AOS_DOMAIN_NAME", "smartsearch") +aos_secret = os.environ.get("AOS_SECRET_NAME", "opensearch-master-user") +sm_client = boto3.client("secretsmanager") +bedrock_region = os.environ.get("BEDROCK_REGION", "us-east-1") +try: + master_user = sm_client.get_secret_value(SecretId=aos_secret)[ + "SecretString" + ] + if not aos_endpoint: + opensearch_client = boto3.client("opensearch") + response = opensearch_client.describe_domain( + DomainName=aos_domain_name + ) + aos_endpoint = response["DomainStatus"]["Endpoint"] + cred = json.loads(master_user) + username = cred.get("username") + password = cred.get("password") + auth = (username, password) + aos_client = LLMBotOpenSearchClient(aos_endpoint, auth) +except sm_client.exceptions.ResourceNotFoundException: + logger.info(f"Secret '{aos_secret}' not found in Secrets Manager") + aos_client = LLMBotOpenSearchClient(aos_endpoint) +except Exception as e: + logger.error(f"Error retrieving secret '{aos_secret}': {str(e)}") + raise + +DEFAULT_TEXT_FIELD_NAME = "text" +DEFAULT_VECTOR_FIELD_NAME = "vector_field" +DEFAULT_SOURCE_FIELD_NAME = "source" + + +def remove_redundancy_debug_info(results): + # filtered_results = copy.deepcopy(results) + filtered_results = results + for result in filtered_results: + for field in list(result["detail"].keys()): + if field.endswith("embedding") or field.startswith("vector"): + del result["detail"][field] + return filtered_results + + +@timeit +def get_similarity_embedding( + query: str, + 
embedding_model_endpoint: str, + target_model: str, + model_type: str = "vector", +) -> List[List[float]]: + if model_type.lower() == "bedrock": + embeddings = BedrockEmbeddings(model_id=embedding_model_endpoint, region_name=bedrock_region) + response = embeddings.embed_query(query) + else: + query_similarity_embedding_prompt = query + response = SagemakerEndpointVectorOrCross( + prompt=query_similarity_embedding_prompt, + endpoint_name=embedding_model_endpoint, + model_type=model_type, + stop=None, + region_name=None, + target_model=target_model, + ) + return response + + +@timeit +def get_relevance_embedding( + query: str, + query_lang: str, + embedding_model_endpoint: str, + target_model: str, + model_type: str = "vector", +): + if model_type == "bedrock": + embeddings = BedrockEmbeddings(model_id=embedding_model_endpoint, region_name=bedrock_region) + response = embeddings.embed_query(query) + else: + if model_type == "vector": + if query_lang == "zh": + query_relevance_embedding_prompt = ( + "为这个句子生成表示以用于检索相关文章:" + query + ) + elif query_lang == "en": + query_relevance_embedding_prompt = ( + "Represent this sentence for searching relevant passages: " + query + ) + else: + query_relevance_embedding_prompt = query + elif model_type == "m3" or model_type == "bce": + query_relevance_embedding_prompt = query + else: + raise ValueError(f"invalid embedding model type: {model_type}") + response = SagemakerEndpointVectorOrCross( + prompt=query_relevance_embedding_prompt, + endpoint_name=embedding_model_endpoint, + model_type=model_type, + region_name=None, + stop=None, + target_model=target_model, + ) + + return response + + +def get_filter_list(parsed_query: dict): + filter_list = [] + if "is_api_query" in parsed_query and parsed_query["is_api_query"]: + filter_list.append({"term": {"metadata.is_api": True}}) + return filter_list + + +def get_faq_answer(source, index_name, source_field): + opensearch_query_response = aos_client.search( + index_name=index_name, + 
query_type="basic", + query_term=source, + field=f"metadata.{source_field}", + ) + for r in opensearch_query_response["hits"]["hits"]: + if ( + "field" in r["_source"]["metadata"] + and "answer" == r["_source"]["metadata"]["field"] + ): + return r["_source"]["content"] + elif "jsonlAnswer" in r["_source"]["metadata"]: + return r["_source"]["metadata"]["jsonlAnswer"]["answer"] + return "" + + +def get_faq_content(source, index_name): + opensearch_query_response = aos_client.search( + index_name=index_name, + query_type="basic", + query_term=source, + field="metadata.source", + ) + for r in opensearch_query_response["hits"]["hits"]: + if r["_source"]["metadata"]["field"] == "all_text": + return r["_source"]["content"] + return "" + + +def get_doc(file_path, index_name): + opensearch_query_response = aos_client.search( + index_name=index_name, + query_type="basic", + query_term=file_path, + field="metadata.file_path", + size=100, + ) + chunk_list = [] + chunk_id_set = set() + for r in opensearch_query_response["hits"]["hits"]: + try: + if "chunk_id" not in r["_source"]["metadata"] or not r["_source"][ + "metadata" + ]["chunk_id"].startswith("$"): + continue + chunk_id = r["_source"]["metadata"]["chunk_id"] + content_type = r["_source"]["metadata"]["content_type"] + chunk_group_id = int(chunk_id.split("-")[0].strip("$")) + chunk_section_id = int(chunk_id.split("-")[-1]) + if (chunk_id, content_type) in chunk_id_set: + continue + except Exception as e: + logger.error(traceback.format_exc()) + continue + chunk_id_set.add((chunk_id, content_type)) + chunk_list.append( + ( + chunk_id, + chunk_group_id, + content_type, + chunk_section_id, + r["_source"]["text"], + ) + ) + sorted_chunk_list = sorted(chunk_list, key=lambda x: (x[1], x[2], x[3])) + chunk_text_list = [x[4] for x in sorted_chunk_list] + return "\n".join(chunk_text_list) + + +def get_child_context(chunk_id, index_name, window_size): + next_content_list = [] + previous_content_list = [] + previous_pos = 0 + 
next_pos = 0 + chunk_id_prefix = "-".join(chunk_id.split("-")[:-1]) + section_id = int(chunk_id.split("-")[-1]) + previous_section_id = section_id + next_section_id = section_id + while previous_pos < window_size: + previous_section_id -= 1 + if previous_section_id < 1: + break + previous_chunk_id = f"{chunk_id_prefix}-{previous_section_id}" + opensearch_query_response = aos_client.search( + index_name=index_name, + query_type="basic", + query_term=previous_chunk_id, + field="metadata.chunk_id", + size=1, + ) + if len(opensearch_query_response["hits"]["hits"]) > 0: + r = opensearch_query_response["hits"]["hits"][0] + previous_content_list.insert(0, r["_source"]["text"]) + previous_pos += 1 + else: + break + while next_pos < window_size: + next_section_id += 1 + next_chunk_id = f"{chunk_id_prefix}-{next_section_id}" + opensearch_query_response = aos_client.search( + index_name=index_name, + query_type="basic", + query_term=next_chunk_id, + field="metadata.chunk_id", + size=1, + ) + if len(opensearch_query_response["hits"]["hits"]) > 0: + r = opensearch_query_response["hits"]["hits"][0] + next_content_list.insert(0, r["_source"]["text"]) + next_pos += 1 + else: + break + return [previous_content_list, next_content_list] + + +def get_sibling_context(chunk_id, index_name, window_size): + next_content_list = [] + previous_content_list = [] + previous_pos = 0 + next_pos = 0 + chunk_id_prefix = "-".join(chunk_id.split("-")[:-1]) + section_id = int(chunk_id.split("-")[-1]) + previous_section_id = section_id + next_section_id = section_id + while previous_pos < window_size: + previous_section_id -= 1 + if previous_section_id < 1: + break + previous_chunk_id = f"{chunk_id_prefix}-{previous_section_id}" + opensearch_query_response = aos_client.search( + index_name=index_name, + query_type="basic", + query_term=previous_chunk_id, + field="metadata.chunk_id", + size=1, + ) + if len(opensearch_query_response["hits"]["hits"]) > 0: + r = 
opensearch_query_response["hits"]["hits"][0] + previous_content_list.insert(0, r["_source"]["text"]) + previous_pos += 1 + else: + break + while next_pos < window_size: + next_section_id += 1 + next_chunk_id = f"{chunk_id_prefix}-{next_section_id}" + opensearch_query_response = aos_client.search( + index_name=index_name, + query_type="basic", + query_term=next_chunk_id, + field="metadata.chunk_id", + size=1, + ) + if len(opensearch_query_response["hits"]["hits"]) > 0: + r = opensearch_query_response["hits"]["hits"][0] + next_content_list.insert(0, r["_source"]["text"]) + next_pos += 1 + else: + break + return [previous_content_list, next_content_list] + + +def get_context(aos_hit, index_name, window_size): + previous_content_list = [] + next_content_list = [] + if "chunk_id" not in aos_hit["_source"]["metadata"]: + return previous_content_list, next_content_list + chunk_id = aos_hit["_source"]["metadata"]["chunk_id"] + inner_previous_content_list, inner_next_content_list = get_sibling_context( + chunk_id, index_name, window_size + ) + if ( + len(inner_previous_content_list) == window_size + and len(inner_next_content_list) == window_size + ): + return inner_previous_content_list, inner_next_content_list + + if "heading_hierarchy" not in aos_hit["_source"]["metadata"]: + return [previous_content_list, next_content_list] + if "previous" in aos_hit["_source"]["metadata"]["heading_hierarchy"]: + previous_chunk_id = aos_hit["_source"]["metadata"]["heading_hierarchy"][ + "previous" + ] + previous_pos = 0 + while ( + previous_chunk_id + and previous_chunk_id.startswith("$") + and previous_pos < window_size + ): + opensearch_query_response = aos_client.search( + index_name=index_name, + query_type="basic", + query_term=previous_chunk_id, + field="metadata.chunk_id", + size=1, + ) + if len(opensearch_query_response["hits"]["hits"]) > 0: + r = opensearch_query_response["hits"]["hits"][0] + previous_chunk_id = r["_source"]["metadata"]["heading_hierarchy"][ + "previous" + ] + 
previous_content_list.insert(0, r["_source"]["text"]) + previous_pos += 1 + else: + break + if "next" in aos_hit["_source"]["metadata"]["heading_hierarchy"]: + next_chunk_id = aos_hit["_source"]["metadata"]["heading_hierarchy"]["next"] + next_pos = 0 + while ( + next_chunk_id and next_chunk_id.startswith("$") and next_pos < window_size + ): + opensearch_query_response = aos_client.search( + index_name=index_name, + query_type="basic", + query_term=next_chunk_id, + field="metadata.chunk_id", + size=1, + ) + if len(opensearch_query_response["hits"]["hits"]) > 0: + r = opensearch_query_response["hits"]["hits"][0] + next_chunk_id = r["_source"]["metadata"]["heading_hierarchy"]["next"] + next_content_list.append(r["_source"]["text"]) + next_pos += 1 + else: + break + return [previous_content_list, next_content_list] + + +def get_parent_content(previous_chunk_id, next_chunk_id, index_name): + previous_content_list = [] + while previous_chunk_id.startswith("$"): + opensearch_query_response = aos_client.search( + index_name=index_name, + query_type="basic", + query_term=previous_chunk_id, + field="metadata.chunk_id", + size=10, + ) + if len(opensearch_query_response["hits"]["hits"]) > 0: + r = opensearch_query_response["hits"]["hits"][0] + previous_chunk_id = r["_source"]["metadata"]["chunk_id"] + previous_content_list.append(r["_source"]["text"]) + else: + break + next_content_list = [] + while next_chunk_id.startswith("$"): + opensearch_query_response = aos_client.search( + index_name=index_name, + query_type="basic", + query_term=next_chunk_id, + field="metadata.chunk_id", + size=10, + ) + if len(opensearch_query_response["hits"]["hits"]) > 0: + r = opensearch_query_response["hits"]["hits"][0] + next_chunk_id = r["_source"]["metadata"]["chunk_id"] + next_content_list.append(r["_source"]["text"]) + else: + break + return [previous_content_list, next_content_list] + + +def organize_faq_results( + response, index_name, source_field="file_path", text_field="text" +): + """ 
+ Organize results from aos response + + :param query_type: query type + :param response: aos response json + """ + results = [] + if not response: + return results + aos_hits = response["hits"]["hits"] + for aos_hit in aos_hits: + result = {} + try: + result["score"] = aos_hit["_score"] + data = aos_hit["_source"] + metadata = data["metadata"] + if "field" in metadata: + result["answer"] = get_faq_answer( + result["source"], index_name, source_field + ) + result["content"] = aos_hit["_source"]["content"] + result["question"] = aos_hit["_source"]["content"] + result[source_field] = aos_hit["_source"]["metadata"][source_field] + elif "answer" in metadata: + # Intentions + result["answer"] = metadata["answer"] + result["question"] = data["text"] + result["content"] = data["text"] + result["source"] = metadata[source_field] + result["kwargs"] = metadata.get("kwargs", {}) + elif "jsonlAnswer" in aos_hit["_source"]["metadata"] and "answer" in aos_hit["_source"]["metadata"]["jsonlAnswer"]: + # Intention + result["answer"] = aos_hit["_source"]["metadata"]["jsonlAnswer"]["answer"] + result["question"] = aos_hit["_source"]["metadata"]["jsonlAnswer"]["question"] + result["content"] = aos_hit["_source"]["text"] + if source_field in aos_hit["_source"]["metadata"]["jsonlAnswer"].keys(): + result[source_field] = aos_hit["_source"]["metadata"]["jsonlAnswer"][source_field] + else: + result[source_field] = aos_hit["_source"]["metadata"]["file_path"] + elif "jsonlAnswer" in aos_hit["_source"]["metadata"] and "answer" not in aos_hit["_source"]["metadata"]["jsonlAnswer"]: + # QQ match + result["answer"] = aos_hit["_source"]["metadata"]["jsonlAnswer"] + result["question"] = aos_hit["_source"]["text"] + result["content"] = aos_hit["_source"]["text"] + result[source_field] = aos_hit["_source"]["metadata"]["file_path"] + else: + result["answer"] = aos_hit["_source"]["metadata"] + result["content"] = aos_hit["_source"][text_field] + result["question"] = aos_hit["_source"][text_field] + 
result[source_field] = aos_hit["_source"]["metadata"][source_field] + except Exception as e: + logger.error(e) + logger.error(traceback.format_exc()) + logger.error(aos_hit) + continue + results.append(result) + return results + + +class QueryQuestionRetriever(BaseRetriever): + index_name: str + vector_field: str = "vector_field" + source_field: str = "source" + top_k: int = 10 + embedding_model_endpoint: str + target_model: str + model_type: str = "vector" + enable_debug: bool = False + + @timeit + def _get_relevant_documents( + self, question: Dict, *, run_manager: CallbackManagerForRetrieverRun + ) -> List[Document]: + query = question["query"] + debug_info = question["debug_info"] + opensearch_knn_results = [] + query_repr = get_similarity_embedding( + query, self.embedding_model_endpoint, self.target_model, self.model_type + ) + opensearch_knn_response = aos_client.search( + index_name=self.index_name, + query_type="knn", + query_term=query_repr, + field=self.vector_field, + size=self.top_k, + ) + opensearch_knn_results.extend( + organize_faq_results( + opensearch_knn_response, self.index_name, self.source_field + ) + ) + docs = [] + for result in opensearch_knn_results: + docs.append( + Document( + page_content=result["content"], + metadata={ + "source": result[self.source_field], + "score": result["score"], + "retrieval_score": result["score"], + "retrieval_content": result["content"], + "answer": result["answer"], + "question": result["question"], + }, + ) + ) + if self.enable_debug: + debug_info[f"qq-knn-recall-{self.index_name}"] = ( + remove_redundancy_debug_info(opensearch_knn_results) + ) + return docs + + +class QueryDocumentKNNRetriever(BaseRetriever): + index_name: str + vector_field: str = "vector_field" + source_field: str = "file_path" + text_field: str = "text" + using_whole_doc: bool = False + context_num: int = 2 + top_k: int = 10 + # lang: Any + model_type: str = "vector" + embedding_model_endpoint: Any + target_model: Any + enable_debug: 
bool = False + lang: str = "zh" + + async def __ainvoke_get_context(self, aos_hit, window_size, loop): + return await loop.run_in_executor( + None, get_context, aos_hit, self.index_name, window_size + ) + + async def __spawn_task(self, aos_hits, context_size): + loop = asyncio.get_event_loop() + task_list = [] + for aos_hit in aos_hits: + if context_size: + task = asyncio.create_task( + self.__ainvoke_get_context(aos_hit, context_size, loop) + ) + task_list.append(task) + return await asyncio.gather(*task_list) + + @timeit + def organize_results( + self, + response, + aos_index=None, + source_field="file_path", + text_field="text", + using_whole_doc=True, + context_size=0, + ): + """ + Organize results from aos response + + :param query_type: query type + :param response: aos response json + """ + results = [] + if not response: + return results + aos_hits = response["hits"]["hits"] + if len(aos_hits) == 0: + return results + for aos_hit in aos_hits: + result = {"data": {}} + source = aos_hit["_source"]["metadata"][source_field] + result["source"] = source + result["score"] = aos_hit["_score"] + result["detail"] = aos_hit["_source"] + result["content"] = aos_hit["_source"][text_field] + result["doc"] = result["content"] + results.append(result) + if kb_enabled: + if using_whole_doc: + for result in results: + doc = get_doc(result["source"], aos_index) + if doc: + result["doc"] = doc + else: + response_list = asyncio.run(self.__spawn_task(aos_hits, context_size)) + for context, result in zip(response_list, results): + result["doc"] = "\n".join(context[0] + [result["content"]] + context[1]) + return results + + @timeit + def __get_knn_results(self, query_term, filter): + opensearch_knn_response = aos_client.search( + index_name=self.index_name, + query_type="knn", + query_term=query_term, + field=self.vector_field, + size=self.top_k, + filter=filter, + ) + opensearch_knn_results = self.organize_results( + opensearch_knn_response, + self.index_name, + 
self.source_field, + self.text_field, + self.using_whole_doc, + self.context_num, + )[: self.top_k] + return opensearch_knn_results + + @timeit + def _get_relevant_documents( + self, question: Dict, *, run_manager: CallbackManagerForRetrieverRun + ) -> List[Document]: + query = question["query"] + # if "query_lang" in question and question["query_lang"] != self.lang and "translated_text" in question: + # query = question["translated_text"] + debug_info = question["debug_info"] + query_repr = get_relevance_embedding( + query, + self.lang, + self.embedding_model_endpoint, + self.target_model, + self.model_type, + ) + # question["colbert"] = query_repr["colbert_vecs"][0] + filter = get_filter_list(question) + # Get AOS KNN results. + opensearch_knn_results = self.__get_knn_results(query_repr, filter) + final_results = opensearch_knn_results + doc_list = [] + content_set = set() + for result in final_results: + if result["doc"] in content_set: + continue + content_set.add(result["content"]) + # TODO: add jsonlans + result_metadata = { + "source": result["source"], + "retrieval_content": result["content"], + "retrieval_data": result["data"], + "retrieval_score": result["score"], + # Set common score for llm. 
+ "score": result["score"], + } + if "figure" in result["detail"]["metadata"]: + result_metadata["figure"] = result["detail"]["metadata"]["figure"] + if "content_type" in result["detail"]["metadata"]: + result_metadata["content_type"] = result["detail"]["metadata"][ + "content_type" + ] + doc_list.append( + Document(page_content=result["doc"], metadata=result_metadata) + ) + if self.enable_debug: + debug_info[f"qd-knn-recall-{self.index_name}"] = ( + remove_redundancy_debug_info(opensearch_knn_results) + ) + + return doc_list + + +class QueryDocumentBM25Retriever(BaseRetriever): + index_name: str + vector_field: str = "vector_field" + source_field: str = "source" + text_field: str = "text" + using_whole_doc: bool = False + context_num: Any + top_k: int = 5 + enable_debug: Any + config: Dict = {"run_name": "BM25"} + + async def __ainvoke_get_context(self, aos_hit, window_size, loop): + return await loop.run_in_executor( + None, get_context, aos_hit, self.index_name, window_size + ) + + async def __spawn_task(self, aos_hits, context_size): + loop = asyncio.get_event_loop() + task_list = [] + for aos_hit in aos_hits: + if context_size: + task = asyncio.create_task( + self.__ainvoke_get_context(aos_hit, context_size, loop) + ) + task_list.append(task) + return await asyncio.gather(*task_list) + + @timeit + def organize_results( + self, + response, + aos_index=None, + source_field="file_path", + text_field="text", + using_whole_doc=True, + context_size=0, + ): + """ + Organize results from aos response + + :param query_type: query type + :param response: aos response json + """ + results = [] + if not response: + return results + aos_hits = response["hits"]["hits"] + if len(aos_hits) == 0: + return results + for aos_hit in aos_hits: + result = {"data": {}} + source = aos_hit["_source"]["metadata"][source_field] + source = ( + source.replace( + "s3://aws-chatbot-knowledge-base/aws-acts-knowledge/qd/zh_CN/", + "https://www.amazonaws.cn/", + ) + .replace( + 
"s3://aws-chatbot-knowledge-base/aws-acts-knowledge/qd/en_US/", + "https://www.amazonaws.cn/en/", + ) + .replace( + "s3://aws-chatbot-knowledge-base/aws-global-site-cn-knowledge/", + "https://aws.amazon.com/", + ) + ) + result["source"] = source + result["score"] = aos_hit["_score"] + result["detail"] = aos_hit["_source"] + # result["content"] = aos_hit['_source'][text_field] + result["content"] = aos_hit["_source"][text_field] + result["doc"] = result["content"] + # if 'additional_vecs' in aos_hit['_source']['metadata'] and \ + # 'colbert_vecs' in aos_hit['_source']['metadata']['additional_vecs']: + # result["data"]["colbert"] = aos_hit['_source']['metadata']['additional_vecs']['colbert_vecs'] + if "jsonlAnswer" in aos_hit["_source"]["metadata"]: + result["jsonlAnswer"] = aos_hit["_source"]["metadata"]["jsonlAnswer"] + results.append(result) + if using_whole_doc: + for result in results: + doc = get_doc(result["source"], aos_index) + if doc: + result["doc"] = doc + else: + response_list = asyncio.run(self.__spawn_task(aos_hits, context_size)) + for context, result in zip(response_list, results): + result["doc"] = "\n".join(context[0] + [result["doc"]] + context[1]) + # context = get_context(aos_hit['_source']["metadata"]["heading_hierarchy"]["previous"], + # aos_hit['_source']["metadata"]["heading_hierarchy"]["next"], + # aos_index, + # context_size) + # if context: + # result["doc"] = "\n".join(context[0] + [result["doc"]] + context[1]) + return results + + @timeit + def __get_bm25_results(self, query_term, filter): + opensearch_bm25_response = aos_client.search( + index_name=self.index_name, + query_type="fuzzy", + query_term=query_term, + field=self.text_field, + size=self.top_k, + filter=filter, + ) + opensearch_bm25_results = self.organize_results( + opensearch_bm25_response, + self.index_name, + self.source_field, + self.text_field, + self.using_whole_doc, + self.context_num, + )[: self.top_k] + return opensearch_bm25_results + + @timeit + def 
_get_relevant_documents( + self, question: Dict, *, run_manager: CallbackManagerForRetrieverRun + ) -> List[Document]: + query = question["query"] + # if "query_lang" in question and question["query_lang"] != self.lang and "translated_text" in question: + # query = question["translated_text"] + debug_info = question["debug_info"] + # query_repr = get_relevance_embedding(query, self.lang, self.embedding_model_endpoint, self.model_type) + # question["colbert"] = query_repr["colbert_vecs"][0] + filter = get_filter_list(question) + opensearch_bm25_results = self.__get_bm25_results(query, filter) + final_results = opensearch_bm25_results + doc_list = [] + content_set = set() + for result in final_results: + if result["doc"] in content_set: + continue + content_set.add(result["content"]) + result_metadata = { + "source": result["source"], + "retrieval_content": result["content"], + "retrieval_data": result["data"], + "retrieval_score": result["score"], + # Set common score for llm. + "score": result["score"], + } + if "figure" in result["detail"]["metadata"]: + result_metadata["figure"] = result["detail"]["metadata"]["figure"] + if "content_type" in result["detail"]["metadata"]: + result_metadata["content_type"] = result["detail"]["metadata"][ + "content_type" + ] + doc_list.append( + Document(page_content=result["doc"], metadata=result_metadata) + ) + if self.enable_debug: + debug_info[f"qd-bm25-recall-{self.index_name}"] = ( + remove_redundancy_debug_info(opensearch_bm25_results) + ) + return doc_list + + +def index_results_format(docs: list, threshold=-1): + results = [] + for doc in docs: + if doc.metadata["score"] < threshold: + continue + results.append( + { + "score": doc.metadata["score"], + "source": doc.metadata["source"], + "answer": doc.metadata["answer"], + "question": doc.metadata["question"], + } + ) + # output = {"answer": json.dumps(results, ensure_ascii=False), "sources": [], "contexts": []} + output = { + "answer": results, + "sources": [], + 
"contexts": [], + "context_docs": [], + "context_sources": [], + } + return output diff --git a/source/lambda/online/common_logic/langchain_integration/retrievers/utils/aos_utils.py b/source/lambda/online/common_logic/langchain_integration/retrievers/utils/aos_utils.py new file mode 100644 index 000000000..be14d7658 --- /dev/null +++ b/source/lambda/online/common_logic/langchain_integration/retrievers/utils/aos_utils.py @@ -0,0 +1,217 @@ +import os +import threading + +import boto3 +from opensearchpy import OpenSearch, RequestsHttpConnection +from requests_aws4auth import AWS4Auth + +open_search_client_lock = threading.Lock() + +credentials = boto3.Session().get_credentials() + +region = boto3.Session().region_name +awsauth = AWS4Auth( + credentials.access_key, + credentials.secret_key, + region, + "es", + session_token=credentials.token, +) + +IMPORT_OPENSEARCH_PY_ERROR = ( + "Could not import OpenSearch. Please install it with `pip install opensearch-py`." +) + + +def _import_not_found_error(): + """Import not found error if available, otherwise raise error.""" + try: + from opensearchpy.exceptions import NotFoundError + except ImportError: + raise ImportError(IMPORT_OPENSEARCH_PY_ERROR) + return NotFoundError + + +class LLMBotOpenSearchClient: + instance = None + + def __new__(cls, host, auth=None): + with open_search_client_lock: + if cls.instance is not None and cls.instance.host == host: + return cls.instance + obj = object.__new__(cls) + cls.instance = obj + return obj + + def __init__(self, host, auth=None): + """ + Initialize OpenSearch client using OpenSearch Endpoint + """ + self.host = host + self.client = OpenSearch( + hosts=[ + { + "host": host.replace("https://", ""), + "port": int(os.environ.get("AOS_PORT", 443)), + } + ], + http_auth=auth if auth is not None else awsauth, + use_ssl=True, + verify_certs=True, + connection_class=RequestsHttpConnection, + ) + self.query_match = { + "knn": self._build_knn_search_query, + "exact": 
self._build_exactly_match_query, + "fuzzy": self._build_fuzzy_search_query, + "basic": self._build_basic_search_query, + } + + def _build_basic_search_query( + self, index_name, query_term, field, size, filter=None + ): + """ + Build basic search query + + :param index_name: Target Index Name + :param query_term: query term + :param field: search field + :param size: number of results to return from aos + + :return: aos response json + """ + query = { + "size": size, + "query": { + "bool": { + "should": [{"match_phrase": {field: query_term}}], + } + }, + "sort": [{"_score": {"order": "desc"}}], + } + if filter: + query["query"]["bool"]["filter"] = filter + + return query + + def _build_fuzzy_search_query( + self, index_name, query_term, field, size, filter=None + ): + """ + Build basic search query + + :param index_name: Target Index Name + :param query_term: query term + :param field: search field + :param size: number of results to return from aos + + :return: aos response json + """ + query = { + "size": size, + "query": {"match": {"text": query_term}}, + "_source": {"excludes": ["*.additional_vecs", "vector_field"]}, + } + if filter: + query["query"]["bool"]["filter"] = filter + + return query + + def _build_knn_search_query(self, index_name, query_term, field, size, filter=None): + """ + Build knn search query + + :param index_name: Target Index Name + :param query_term: query term + :param field: search field + :param size: number of results to return from aos + + :return: aos response json + """ + if filter: + query = { + "size": size, + "query": { + "bool": { + "filter": {"bool": {"must": filter}}, + "must": [{"knn": {field: {"vector": query_term, "k": size}}}], + } + }, + "_source": {"excludes": ["*.additional_vecs", "vector_field"]}, + } + else: + query = { + "size": size, + "query": {"knn": {field: {"vector": query_term, "k": size}}}, + "_source": {"excludes": ["*.additional_vecs", "vector_field"]}, + } + return query + + def 
_build_exactly_match_query(self, index_name, query_term, field, size): + """ + Build exactly match query + + :param index_name: Target Index Name + :param query_term: query term + :param field: search field + :param size: number of results to return from aos + + :return: aos response json + """ + query = {"query": {"match_phrase": {field: query_term}}} + return query + + def organize_results(self, query_type, response, field): + """ + Organize results from aos response + + :param query_type: query type + :param response: aos response json + """ + results = [] + aos_hits = response["hits"]["hits"] + if query_type == "exact": + for aos_hit in aos_hits: + doc = aos_hit["_source"][field] + source = aos_hit["_source"]["metadata"]["source"] + score = aos_hit["_score"] + results.append({"doc": doc, "score": score, "source": source}) + else: + for aos_hit in aos_hits: + doc = f"{aos_hit['_source'][field]}" + source = aos_hit["_source"]["metadata"]["source"] + score = aos_hit["_score"] + results.append({"doc": doc, "score": score, "source": source}) + return results + + def search( + self, + index_name, + query_type, + query_term, + field: str = "text", + size: int = 10, + filter=None, + ): + """ + Perform search on aos + + :param index_name: Target Index Name + :param query_type: query type + :param query_term: query term + :param field: search field + :param size: number of results to return from aos + :param filter: filter query + + :return: aos response json + """ + not_found_error = _import_not_found_error() + try: + self.client.indices.get(index=index_name) + except not_found_error: + return [] + query = self.query_match[query_type]( + index_name, query_term, field, size, filter + ) + response = self.client.search(body=query, index=index_name) + return response diff --git a/source/lambda/online/common_logic/langchain_integration/retrievers/utils/context_utils.py b/source/lambda/online/common_logic/langchain_integration/retrievers/utils/context_utils.py new file mode 
100644 index 000000000..cada844c0 --- /dev/null +++ b/source/lambda/online/common_logic/langchain_integration/retrievers/utils/context_utils.py @@ -0,0 +1,78 @@ +import logging +import os + +from langchain.docstore.document import Document + +from common_logic.common_utils.time_utils import timeit + +logger = logging.getLogger("context_utils") +logger.setLevel(logging.INFO) + + +def contexts_trunc(docs: list[dict], context_num=2): + docs = [doc for doc in docs[:context_num]] + # the most related doc will be placed last + docs.sort(key=lambda x: x["score"]) + # filter same docs + s = set() + context_strs = [] + context_docs = [] + context_sources = [] + for doc in docs: + content = doc["page_content"] + if content not in s: + context_strs.append(content) + s.add(content) + context_docs.append( + {"doc": content, "source": doc["source"], "score": doc["score"]} + ) + context_sources.append(doc["source"]) + return { + "contexts": context_strs, + "context_docs": context_docs, + "context_sources": context_sources, + } + + +@timeit +def retriever_results_format( + docs: list[Document], + print_source=True, + print_content=os.environ.get("print_content", False), +): + doc_dicts = [] + + for doc in docs: + doc_dicts.append( + { + "page_content": doc.page_content, + "retrieval_score": doc.metadata["retrieval_score"], + "rerank_score": doc.metadata["score"], + "score": doc.metadata["score"], + "source": doc.metadata["source"], + "answer": doc.metadata.get("answer", ""), + "question": doc.metadata.get("question", ""), + "figure": doc.metadata.get("figure", []), + } + ) + if print_source: + source_strs = [] + for doc_dict in doc_dicts: + content = "" + if print_content: + content = f', content: {doc_dict["page_content"]}' + source_strs.append( + f'source: {doc_dict["source"]}, score: {doc_dict["score"]}{content}, retrieval score: {doc_dict["retrieval_score"]}' + ) + logger.info("retrieved sources:\n" + "\n".join(source_strs)) + return doc_dicts + + +def 
documents_list_filter(doc_dicts: list[dict], filter_key="score", threshold=-1): + results = [] + for doc_dict in doc_dicts: + if doc_dict[filter_key] < threshold: + continue + results.append(doc_dict) + + return results diff --git a/source/lambda/online/common_logic/langchain_integration/retrievers/utils/reranker.py b/source/lambda/online/common_logic/langchain_integration/retrievers/utils/reranker.py new file mode 100644 index 000000000..7405d59d5 --- /dev/null +++ b/source/lambda/online/common_logic/langchain_integration/retrievers/utils/reranker.py @@ -0,0 +1,217 @@ +import json +import os +import time +import logging +import asyncio +import numpy as np +logger = logging.getLogger() +logger.setLevel(logging.INFO) + +from typing import Dict, Optional, Sequence, Any + +from langchain.callbacks.manager import Callbacks +from langchain.schema import Document +from langchain.retrievers.document_compressors.base import BaseDocumentCompressor + +from sm_utils import SagemakerEndpointVectorOrCross + +rerank_model_endpoint = os.environ.get("RERANK_ENDPOINT", "") + +"""Document compressor that uses BGE reranker model.""" +class BGEM3Reranker(BaseDocumentCompressor): + + """Number of documents to return.""" + def _colbert_score_np(self, q_reps, p_reps): + token_scores = np.einsum('nik,njk->nij', q_reps, p_reps) + scores = token_scores.max(-1) + scores = np.sum(scores) / q_reps.shape[0] + return scores + + async def __ainvoke_rerank_model(self, query_batch, doc_batch, loop): + return await loop.run_in_executor(None, + self._colbert_score_np, + np.asarray(query_batch), + np.asarray(doc_batch)) + + async def __spawn_task(self, query_colbert_list, doc_colbert_list): + batch_size = 1 + task_list = [] + loop = asyncio.get_event_loop() + for batch_start in range(0, len(query_colbert_list), batch_size): + task = asyncio.create_task(self.__ainvoke_rerank_model( + query_colbert_list[batch_start:batch_start + batch_size], + doc_colbert_list[batch_start:batch_start + batch_size], 
loop)) + task_list.append(task) + return await asyncio.gather(*task_list) + + def compress_documents( + self, + documents: Sequence[Document], + query: dict, + callbacks: Optional[Callbacks] = None, + ) -> Sequence[Document]: + """ + Compress documents using BGE M3 Colbert Score. + + Args: + documents: A sequence of documents to compress. + query: The query to use for compressing the documents. + callbacks: Callbacks to run during the compression process. + + Returns: + A sequence of compressed documents. + """ + start = time.time() + if len(documents) == 0: # to avoid empty api call + return [] + doc_list = list(documents) + _docs = [d.metadata["retrieval_data"]['colbert'] for d in doc_list] + + rerank_text_length = 1024 * 10 + query_colbert_list = [] + doc_colbert_list = [] + for doc in _docs: + query_colbert_list.append(query["colbert"][:rerank_text_length]) + doc_colbert_list.append(doc[:rerank_text_length]) + score_list = [] + logger.info(f'rerank pair num {len(query_colbert_list)}, m3 method: colbert score') + score_list = asyncio.run(self.__spawn_task(query_colbert_list, doc_colbert_list)) + final_results = [] + debug_info = query["debug_info"] + debug_info["knowledge_qa_rerank"] = [] + for doc, score in zip(doc_list, score_list): + doc.metadata["rerank_score"] = score + # set common score for llm. 
+ doc.metadata["score"] = doc.metadata["rerank_score"] + final_results.append(doc) + debug_info["knowledge_qa_rerank"].append((doc.page_content, doc.metadata["retrieval_content"], doc.metadata["source"], score)) + final_results.sort(key=lambda x: x.metadata["rerank_score"], reverse=True) + debug_info["knowledge_qa_rerank"].sort(key=lambda x: x[-1], reverse=True) + recall_end_time = time.time() + elpase_time = recall_end_time - start + logger.info(f"runing time of rerank: {elpase_time}s seconds") + return final_results + +"""Document compressor that uses BGE reranker model.""" +class BGEReranker(BaseDocumentCompressor): + + """Number of documents to return.""" + config: Dict={"run_name": "BGEReranker"} + enable_debug: Any + target_model: Any + rerank_model_endpoint: str=rerank_model_endpoint + top_k: int=10 + + def __init__(self,enable_debug=False, rerank_model_endpoint=rerank_model_endpoint, target_model=None, top_k=10): + super().__init__() + self.enable_debug = enable_debug + self.rerank_model_endpoint = rerank_model_endpoint + self.target_model = target_model + self.top_k = top_k + + async def __ainvoke_rerank_model(self, batch, loop): + logging.info("invoke endpoint") + return await loop.run_in_executor(None, + SagemakerEndpointVectorOrCross, + json.dumps(batch), + self.rerank_model_endpoint, + None, + "rerank", + None, + self.target_model) + + async def __spawn_task(self, rerank_pair): + batch_size = 128 + task_list = [] + loop = asyncio.get_event_loop() + for batch_start in range(0, len(rerank_pair), batch_size): + task = asyncio.create_task(self.__ainvoke_rerank_model(rerank_pair[batch_start:batch_start + batch_size], loop)) + task_list.append(task) + return await asyncio.gather(*task_list) + + def compress_documents( + self, + documents: Sequence[Document], + query: str, + callbacks: Optional[Callbacks] = None, + ) -> Sequence[Document]: + """ + Compress documents using BGE rerank model. + + Args: + documents: A sequence of documents to compress. 
+ query: The query to use for compressing the documents. + callbacks: Callbacks to run during the compression process. + + Returns: + A sequence of compressed documents. + """ + start = time.time() + if len(documents) == 0: # to avoid empty api call + return [] + doc_list = list(documents) + _docs = [d.metadata["retrieval_content"] for d in doc_list] + + rerank_pair = [] + rerank_text_length = 1024 * 10 + for doc in _docs: + rerank_pair.append([query["query"], doc[:rerank_text_length]]) + score_list = [] + logger.info(f'rerank pair num {len(rerank_pair)}, endpoint_name: {self.rerank_model_endpoint}') + response_list = asyncio.run(self.__spawn_task(rerank_pair)) + for response in response_list: + score_list.extend(json.loads(response)) + final_results = [] + debug_info = query["debug_info"] + debug_info["knowledge_qa_rerank"] = [] + for doc, score in zip(doc_list, score_list): + doc.metadata["rerank_score"] = score + # set common score for llm. + doc.metadata["retrieval_score"] = doc.metadata["retrieval_score"] + doc.metadata["score"] = doc.metadata["rerank_score"] + final_results.append(doc) + if self.enable_debug: + debug_info["knowledge_qa_rerank"].append((doc.page_content, doc.metadata["retrieval_content"], doc.metadata["source"], score)) + final_results.sort(key=lambda x: x.metadata["rerank_score"], reverse=True) + debug_info["knowledge_qa_rerank"].sort(key=lambda x: x[-1], reverse=True) + recall_end_time = time.time() + elpase_time = recall_end_time - start + logger.info(f"runing time of rerank: {elpase_time}s seconds") + return final_results[:self.top_k] + +"""Document compressor that uses retriever score.""" +class MergeReranker(BaseDocumentCompressor): + + """Number of documents to return.""" + + def compress_documents( + self, + documents: Sequence[Document], + query: str, + callbacks: Optional[Callbacks] = None, + ) -> Sequence[Document]: + """ + Compress documents using BGE rerank model. + + Args: + documents: A sequence of documents to compress. 
+ query: The query to use for compressing the documents. + callbacks: Callbacks to run during the compression process. + + Returns: + A sequence of compressed documents. + """ + start = time.time() + if len(documents) == 0: # to avoid empty api call + return [] + final_results = [] + debug_info = query["debug_info"] + debug_info["knowledge_qa_rerank"] = [] + final_results = list(documents) + final_results.sort(key=lambda x: x.metadata["score"], reverse=True) + debug_info["knowledge_qa_rerank"].append([(doc.page_content, doc.metadata["retrieval_content"], + doc.metadata["source"], doc.metadata["score"]) for doc in final_results]) + recall_end_time = time.time() + elpase_time = recall_end_time - start + logger.info(f"runing time of rerank: {elpase_time}s seconds") + return final_results \ No newline at end of file diff --git a/source/lambda/online/common_logic/langchain_integration/retrievers/utils/test.py b/source/lambda/online/common_logic/langchain_integration/retrievers/utils/test.py new file mode 100644 index 000000000..2c7daa753 --- /dev/null +++ b/source/lambda/online/common_logic/langchain_integration/retrievers/utils/test.py @@ -0,0 +1,176 @@ +import json +import os + +os.environ["PYTHONUNBUFFERED"] = "1" +import logging +import sys + +import boto3 +from common_logic.common_utils.lambda_invoke_utils import chatbot_lambda_call_wrapper +from lambda_retriever.utils.aos_retrievers import ( + QueryDocumentBM25Retriever, + QueryDocumentKNNRetriever, + QueryQuestionRetriever, +) +from lambda_retriever.utils.context_utils import retriever_results_format +from lambda_retriever.utils.reranker import MergeReranker +from langchain.retrievers import ( + AmazonKnowledgeBasesRetriever, + ContextualCompressionRetriever, +) +from langchain.retrievers.merger_retriever import MergerRetriever +from langchain.schema.runnable import RunnableLambda, RunnablePassthrough +from langchain_community.retrievers import AmazonKnowledgeBasesRetriever + +logger = 
logging.getLogger("retriever") +logger.setLevel(logging.INFO) + +SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) +sys.path.append(os.path.dirname(SCRIPT_DIR)) + +region = boto3.Session().region_name + +knowledgebase_client = boto3.client("bedrock-agent-runtime", region) +sm_client = boto3.client("sagemaker-runtime") + + +def get_bedrock_kb_retrievers(knowledge_base_id_list, top_k: int): + retriever_list = [ + AmazonKnowledgeBasesRetriever( + knowledge_base_id=knowledge_base_id, + retrieval_config={"vectorSearchConfiguration": {"numberOfResults": top_k}}, + ) + for knowledge_base_id in knowledge_base_id_list + ] + return retriever_list + + +def get_custom_qd_retrievers(retriever_config, using_bm25=False): + default_qd_config = { + "using_whole_doc": False, + "context_num": 1, + "top_k": 10, + "query_key": "query", + } + # qd_config = {**default_qd_config, **qd_config} + retriever_list = [QueryDocumentKNNRetriever(retriever_config)] + if using_bm25: + retriever_list += [QueryDocumentBM25Retriever(retriever_config)] + return retriever_list + + +def get_custom_qq_retrievers(retriever_config): + default_qq_config = {"top_k": 10, "query_key": "query"} + + return [ + QueryQuestionRetriever( + retriever_config, + # **qq_config + ) + ] + + +def get_whole_chain(retriever_list, reranker_config): + lotr = MergerRetriever(retrievers=retriever_list) + # if len(reranker_config): + # default_reranker_config = { + # "enable_debug": False, + # "target_model": "bge_reranker_model.tar.gz", + # "query_key": "query", + # "top_k": 10 + # } + # reranker_config = {**default_reranker_config, **reranker_config} + # compressor = BGEReranker(**reranker_config) + # else: + compressor = MergeReranker() + + compression_retriever = ContextualCompressionRetriever( + base_compressor=compressor, base_retriever=lotr + ) + whole_chain = RunnablePassthrough.assign( + docs=compression_retriever | RunnableLambda(retriever_results_format) + ) + return whole_chain + + +def 
get_custom_retrievers(retriever_config, retriever_type="qd"): + retriever_dict = { + "qq": get_custom_qq_retrievers, + "qd": get_custom_qd_retrievers, + "bedrock_kb": get_bedrock_kb_retrievers, + } + # retriever_type = retriever_config["type"] + return retriever_dict[retriever_type](retriever_config) + + +@chatbot_lambda_call_wrapper +def lambda_handler(event, context=None): + event_body = event + retriever_list = [] + retriever_type = event_body["type"] + for retriever_config in event_body["retrievers"]: + # retriever_type = retriever_config["type"] + retriever_list.extend(get_custom_retrievers(retriever_config, retriever_type)) + + # Re-rank not used. + # rerankers = event_body.get("rerankers", None) + # if rerankers: + # reranker_config = rerankers[0]["config"] + # else: + # reranker_config = {} + reranker_config = {} + if len(retriever_list) > 0: + whole_chain = get_whole_chain(retriever_list, reranker_config) + else: + whole_chain = RunnablePassthrough.assign(docs=lambda x: []) + docs = whole_chain.invoke({"query": event_body["query"], "debug_info": {}}) + return {"code": 0, "result": docs} + + +if __name__ == "__main__": + query = """test""" + event = { + "body": json.dumps( + { + "retrievers": [ + { + "index": "test-intent", + "config": {"top_k": "3"}, + "embedding": { + "type": "Bedrock", + "model_id": "cohere.embed-multilingual-v3", + }, + } + ], + "query": query, + "type": "qq", + } + ) + } + + event2 = { + "body": json.dumps( + { + "retrievers": [ + { + "index": "test-qa", + "config": { + "top_k": "3", + "vector_field_name": "sentence_vector", + "text_field_name": "paragraph", + "source_field_name": "source", + }, + "embedding": { + "type": "Bedrock", + "model_id": "amazon.titan-embed-text-v2:0", + }, + } + ], + "query": query, + "type": "qd", + } + ) + } + + response = lambda_handler(event2, None) + print(response) diff --git a/source/lambda/online/common_logic/langchain_integration/retrievers/utils/websearch_retrievers.py 
b/source/lambda/online/common_logic/langchain_integration/retrievers/utils/websearch_retrievers.py new file mode 100644 index 000000000..babdeb9b3 --- /dev/null +++ b/source/lambda/online/common_logic/langchain_integration/retrievers/utils/websearch_retrievers.py @@ -0,0 +1,124 @@ +import asyncio +import aiohttp +import time +import re +from bs4 import BeautifulSoup +import os +from typing import Any, Dict, List +import logging +logger = logging.getLogger() +logger.setLevel(logging.INFO) + +from langchain_community.utilities import GoogleSearchAPIWrapper +from langchain.callbacks.manager import CallbackManagerForRetrieverRun +from langchain.docstore.document import Document +from langchain.schema.retriever import BaseRetriever +from langchain.agents import Tool + +GOOGLE_API_KEY=os.environ.get('GOOGLE_API_KEY',None) +GOOGLE_CSE_ID=os.environ.get('GOOGLE_CSE_ID',None) + + +class GoogleSearchTool(): + tool:Tool + topk:int = 5 + + def __init__(self,top_k=5): + self.topk = top_k + search = GoogleSearchAPIWrapper() + def top_results(query): + return search.results(query, self.topk) + self.tool = Tool( + name="Google Search Snippets", + description="Search Google for recent results.", + func=top_results, + ) + + def run(self,query): + return self.tool.run(query) + +def remove_html_tags(text): + soup = BeautifulSoup(text, 'html.parser') + text = soup.get_text() + text = re.sub(r'\r{1,}',"\n\n",text) + text = re.sub(r'\t{1,}',"\t",text) + text = re.sub(r'\n{2,}',"\n\n",text) + return text + +async def fetch(session, url, timeout): + try: + async with session.get(url) as response: + return await asyncio.wait_for(response.text(), timeout=timeout) + except asyncio.TimeoutError: + print(f"timeout:{url}") + return '' + except Exception as e: + print(f"ClientError:{url}", str(e)) + return '' + + +async def fetch_all(urls, timeout): + async with aiohttp.ClientSession() as session: + tasks = [] + for url in urls: + task = asyncio.create_task(fetch(session, url, timeout)) + 
tasks.append(task) + + results = await asyncio.gather(*tasks) + return results + +def web_search(**args): + if not GOOGLE_API_KEY or not GOOGLE_CSE_ID: + logger.info('Missing google API key') + return [] + tool = GoogleSearchTool(args['top_k']) + result = tool.run(args['query']) + return [item for item in result if 'title' in item and 'link' in item and 'snippet' in item] + + +def add_webpage_content(snippet_results): + t1 = time.time() + urls = [item['doc_author'] for item in snippet_results] + loop = asyncio.get_event_loop() + fetch_results = loop.run_until_complete(fetch_all(urls,5)) + t2= time.time() + logger.info(f'deep web search time:{t2-t1:1f}s') + final_results = [] + for i, result in enumerate(fetch_results): + if not result: + continue + page_content = remove_html_tags(result) + final_results.append({**snippet_results[i], + 'doc':snippet_results[i]['doc']+'\n'+page_content[:10000] + }) + return final_results + +class GoogleRetriever(BaseRetriever): + search: Any + result_num: Any + + def __init__(self, result_num): + super().__init__() + self.result_num = result_num + + def _get_relevant_documents( + self, question: Dict, *, run_manager: CallbackManagerForRetrieverRun + ) -> List[Document]: + query = question[self.query_key] + result_list = web_search(query=query, top_k=self.result_num) + doc_list = [] + for result in result_list: + doc_list.append( + Document( + page_content=result["doc"], + metadata={ + "source": result["link"], + "retrieval_content": result["title"] + '\n' + result["snippet"], + "retrieval_data": result["title"] + } + ) + ) + return doc_list + + def get_whole_doc(self, results) -> Dict: + return add_webpage_content(self._get_relevant_documents(results)) \ No newline at end of file diff --git a/source/lambda/online/common_logic/langchain_integration/tools/common_tools/rag.py b/source/lambda/online/common_logic/langchain_integration/tools/common_tools/rag.py index 8d6ce7d3a..1e727de1e 100644 --- 
a/source/lambda/online/common_logic/langchain_integration/tools/common_tools/rag.py +++ b/source/lambda/online/common_logic/langchain_integration/tools/common_tools/rag.py @@ -4,7 +4,7 @@ LLMTaskType ) from common_logic.common_utils.lambda_invoke_utils import send_trace - +from common_logic.langchain_integration.retrievers.retriever import lambda_handler as retrieve_fn def rag_tool(retriever_config:dict,query=None): state = StateContext.get_current_state() @@ -17,12 +17,13 @@ def rag_tool(retriever_config:dict,query=None): # retriever_params = state["chatbot_config"]["private_knowledge_config"] retriever_params["query"] = query or state[retriever_config.get("query_key","query")] # retriever_params["query"] = query - output: str = invoke_lambda( - event_body=retriever_params, - lambda_name="Online_Functions", - lambda_module_path="functions.functions_utils.retriever.retriever", - handler_name="lambda_handler", - ) + # output: str = invoke_lambda( + # event_body=retriever_params, + # lambda_name="Online_Functions", + # lambda_module_path="functions.functions_utils.retriever.retriever", + # handler_name="lambda_handler", + # ) + output = retrieve_fn(retriever_params) for doc in output["result"]["docs"]: context_list.append(doc["page_content"]) diff --git a/source/lambda/online/functions/lambda_common_tools/rag.py b/source/lambda/online/functions/lambda_common_tools/rag.py index 01170fabd..e6e9e60b7 100644 --- a/source/lambda/online/functions/lambda_common_tools/rag.py +++ b/source/lambda/online/functions/lambda_common_tools/rag.py @@ -17,12 +17,14 @@ def lambda_handler(event_body, context=None): retriever_params = state["chatbot_config"]["private_knowledge_config"] retriever_params["query"] = state[retriever_params.get( "retriever_config", {}).get("query_key", "query")] + output: str = invoke_lambda( event_body=retriever_params, lambda_name="Online_Functions", lambda_module_path="functions.functions_utils.retriever.retriever", handler_name="lambda_handler", ) + 
print("RAG debug") print(output) diff --git a/source/lambda/online/lambda_intention_detection/intention.py b/source/lambda/online/lambda_intention_detection/intention.py index f0bbc4561..93f019d51 100644 --- a/source/lambda/online/lambda_intention_detection/intention.py +++ b/source/lambda/online/lambda_intention_detection/intention.py @@ -1,9 +1,11 @@ -from common_logic.common_utils.logger_utils import get_logger -from common_logic.common_utils.lambda_invoke_utils import chatbot_lambda_call_wrapper,invoke_lambda import json import pathlib import os +from common_logic.common_utils.logger_utils import get_logger +from common_logic.common_utils.lambda_invoke_utils import chatbot_lambda_call_wrapper,invoke_lambda +from common_logic.langchain_integration.retrievers.retriever import lambda_handler as retrieve_fn + logger = get_logger("intention") kb_enabled = os.environ["KNOWLEDGE_BASE_ENABLED"].lower() == "true" kb_type = json.loads(os.environ["KNOWLEDGE_BASE_TYPE"]) @@ -26,12 +28,13 @@ def get_intention_results(query:str, intention_config:dict): **intention_config } # call retriver - res:list[dict] = invoke_lambda( - lambda_name="Online_Functions", - lambda_module_path="functions.functions_utils.retriever.retriever", - handler_name="lambda_handler", - event_body=event_body - ) + # res:list[dict] = invoke_lambda( + # lambda_name="Online_Functions", + # lambda_module_path="functions.functions_utils.retriever.retriever", + # handler_name="lambda_handler", + # event_body=event_body + # ) + res = retrieve_fn(event_body) if not res["result"]["docs"]: # add default intention @@ -91,6 +94,5 @@ def lambda_handler(state:dict, context=None): **intention_config, } ) - return output From dacd1eb75834a97c4e5e42f9089a8597bc877994 Mon Sep 17 00:00:00 2001 From: NingLu Date: Mon, 4 Nov 2024 12:24:15 +0000 Subject: [PATCH 052/110] feat: support showing reference in monitor --- .../common_logic/common_utils/monitor_utils.py | 18 ++++++++++-------- .../retriever/utils/aos_retrievers.py | 
14 -------------- source/lambda/online/lambda_main/main.py | 5 ++++- 3 files changed, 14 insertions(+), 23 deletions(-) diff --git a/source/lambda/online/common_logic/common_utils/monitor_utils.py b/source/lambda/online/common_logic/common_utils/monitor_utils.py index 7d75a34c6..c028c8017 100644 --- a/source/lambda/online/common_logic/common_utils/monitor_utils.py +++ b/source/lambda/online/common_logic/common_utils/monitor_utils.py @@ -52,24 +52,26 @@ def format_rag_data(data, qq_result) -> str: if data is None or len(data) == 0: return "" - markdown_table = "| Source | Score | RAG Context |\n" - markdown_table += "|-----|-----|-----|\n" + markdown_table = "| Source File Name | Source URI | Score | RAG Context |\n" + markdown_table += "|-----|-----|-----|-----|\n" for item in data: - source = _generate_markdown_link(item.get("source", "")) + raw_source = item.get("source", "") + source = _generate_markdown_link(raw_source) score = item.get("score", -1) page_content = item.get("page_content", "").replace("\n", "
") - markdown_table += f"| {source} | {score} | {page_content} |\n\n" + markdown_table += f"| {source} | {raw_source} | {score} | {page_content} |\n\n" markdown_table += "**QQ Match Result**\n" - markdown_table += "| Source | Score | Question | Answer |\n" - markdown_table += "|-----|-----|-----|-----|\n" + markdown_table += "| Source File Name | Source URI | Score | Question | Answer |\n" + markdown_table += "|-----|-----|-----|-----|-----|\n" for qq_item in qq_result: - qq_source = _generate_markdown_link(qq_item.get("source", "")) + raw_qq_source = qq_item.get("source", "") + qq_source = _generate_markdown_link(raw_qq_source) qq_score = qq_item.get("score", -1) qq_question = qq_item.get("page_content", "").replace("\n", "
") qq_answer = qq_item.get("answer", "").replace("\n", "
") - markdown_table += f"| {qq_source} | {qq_score} | {qq_question} | {qq_answer} |\n" + markdown_table += f"| {qq_source} | {raw_qq_source} | {qq_score} | {qq_question} | {qq_answer} |\n" return markdown_table diff --git a/source/lambda/online/functions/functions_utils/retriever/utils/aos_retrievers.py b/source/lambda/online/functions/functions_utils/retriever/utils/aos_retrievers.py index 5fb9ff4d5..6a7b5caeb 100644 --- a/source/lambda/online/functions/functions_utils/retriever/utils/aos_retrievers.py +++ b/source/lambda/online/functions/functions_utils/retriever/utils/aos_retrievers.py @@ -710,20 +710,6 @@ def organize_results( for aos_hit in aos_hits: result = {"data": {}} source = aos_hit["_source"]["metadata"][source_field] - source = ( - source.replace( - "s3://aws-chatbot-knowledge-base/aws-acts-knowledge/qd/zh_CN/", - "https://www.amazonaws.cn/", - ) - .replace( - "s3://aws-chatbot-knowledge-base/aws-acts-knowledge/qd/en_US/", - "https://www.amazonaws.cn/en/", - ) - .replace( - "s3://aws-chatbot-knowledge-base/aws-global-site-cn-knowledge/", - "https://aws.amazon.com/", - ) - ) result["source"] = source result["score"] = aos_hit["_score"] result["detail"] = aos_hit["_source"] diff --git a/source/lambda/online/lambda_main/main.py b/source/lambda/online/lambda_main/main.py index aca2226e6..17175f39b 100644 --- a/source/lambda/online/lambda_main/main.py +++ b/source/lambda/online/lambda_main/main.py @@ -10,6 +10,7 @@ from common_logic.common_utils.lambda_invoke_utils import ( chatbot_lambda_call_wrapper, is_running_local, + send_trace ) from common_logic.common_utils.logger_utils import get_logger from common_logic.common_utils.websocket_utils import load_ws_client @@ -367,6 +368,8 @@ def lambda_handler(event_body: dict, context: dict): return default_event_handler(event_body, context, entry_executor) except Exception as e: error_response = {"answer": str(e), "extra_response": {}} + enable_trace = event_body.get("chatbot_config", {}).get("enable_trace", True) 
+ send_trace(f"\n### Error trace\n\n{traceback.format_exc()}\n\n", enable_trace=enable_trace) process_response(event_body, error_response) - logger.error(f"An error occurred: {str(e)}") + logger.error(f"An error occurred: {str(e)}\n{traceback.format_exc()}") return {"error": str(e)} From 5f9294f86746a194c4e670a3dd0dba3dc8d78529 Mon Sep 17 00:00:00 2001 From: Ning Lv Date: Tue, 5 Nov 2024 12:09:41 +0800 Subject: [PATCH 053/110] feat: add intention not ready condition --- .../online/lambda_intention_detection/intention.py | 10 ++++++++-- source/lambda/online/lambda_main/main.py | 5 +++-- .../main_utils/online_entries/common_entry.py | 11 ++++++++++- 3 files changed, 21 insertions(+), 5 deletions(-) diff --git a/source/lambda/online/lambda_intention_detection/intention.py b/source/lambda/online/lambda_intention_detection/intention.py index f0bbc4561..d0d32a14d 100644 --- a/source/lambda/online/lambda_intention_detection/intention.py +++ b/source/lambda/online/lambda_intention_detection/intention.py @@ -10,7 +10,7 @@ intelli_agent_kb_enabled = kb_type.get("intelliAgentKb", {}).get("enabled", False) -def get_intention_results(query:str, intention_config:dict): +def get_intention_results(query: str, intention_config: dict): """get intention few shots results according embedding similarity Args: @@ -25,6 +25,9 @@ def get_intention_results(query:str, intention_config:dict): "type": "qq", **intention_config } + + logger.info("intention event body") + logger.info(event_body) # call retriver res:list[dict] = invoke_lambda( lambda_name="Online_Functions", @@ -34,6 +37,9 @@ def get_intention_results(query:str, intention_config:dict): ) if not res["result"]["docs"]: + # Return to guide the user to add intentions + return [], False + # add default intention current_path = pathlib.Path(__file__).parent.resolve() try: @@ -76,7 +82,7 @@ def get_intention_results(query:str, intention_config:dict): } intent_fewshot_examples.append(doc_item) - return intent_fewshot_examples + return 
intent_fewshot_examples, True @chatbot_lambda_call_wrapper diff --git a/source/lambda/online/lambda_main/main.py b/source/lambda/online/lambda_main/main.py index 17175f39b..fbeaa1ce5 100644 --- a/source/lambda/online/lambda_main/main.py +++ b/source/lambda/online/lambda_main/main.py @@ -369,7 +369,8 @@ def lambda_handler(event_body: dict, context: dict): except Exception as e: error_response = {"answer": str(e), "extra_response": {}} enable_trace = event_body.get("chatbot_config", {}).get("enable_trace", True) - send_trace(f"\n### Error trace\n\n{traceback.format_exc()}\n\n", enable_trace=enable_trace) + error_trace = f"\n### Error trace\n\n{traceback.format_exc()}\n\n" + send_trace(error_trace, enable_trace=enable_trace) process_response(event_body, error_response) - logger.error(f"An error occurred: {str(e)}\n{traceback.format_exc()}") + logger.error(f"An error occurred: {str(e)}\n{error_trace}") return {"error": str(e)} diff --git a/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py b/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py index ea618f864..2d6ebab69 100644 --- a/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py +++ b/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py @@ -159,13 +159,16 @@ def intention_detection(state: ChatbotState): if state["chatbot_config"]["agent_config"]["only_use_rag_tool"]: return {"qq_match_results": context_list, "intent_type": "intention detected"} - intent_fewshot_examples = invoke_lambda( + intent_fewshot_examples, intention_ready = invoke_lambda( lambda_module_path="lambda_intention_detection.intention", lambda_name="Online_Intention_Detection", handler_name="lambda_handler", event_body=state, ) + if not intention_ready: + return {"intent_type": "intention not ready"} + intent_fewshot_tools: list[str] = list( set([e["intent"] for e in intent_fewshot_examples]) ) @@ -299,6 +302,10 @@ def matched_query_return(state: 
ChatbotState): return {"answer": state["answer"]} +def intention_not_ready(state: ChatbotState): + return {"answer": state["answer"]} + + ################ # define edges # ################ @@ -345,6 +352,7 @@ def build_graph(chatbot_state_cls): # agent mode workflow.add_node("intention_detection", intention_detection) workflow.add_node("matched_query_return", matched_query_return) + workflow.add_node("intention_not_ready", intention_not_ready) # agent sub graph workflow.add_node("agent", agent) workflow.add_node("tools_execution", tool_execution) @@ -386,6 +394,7 @@ def build_graph(chatbot_state_cls): { "similar query found": "matched_query_return", "intention detected": "agent", + "intention not ready": "intention_not_ready", }, ) From 8a2a4e87e3c2efa6b98cc709f8d6b782ff125cb3 Mon Sep 17 00:00:00 2001 From: NingLyu Date: Tue, 5 Nov 2024 04:34:54 +0000 Subject: [PATCH 054/110] feat: support no intention --- .../common_logic/common_utils/constant.py | 4 ++ .../common_utils/monitor_utils.py | 11 ++-- .../functions/lambda_common_tools/rag.py | 3 -- .../lambda_intention_detection/intention.py | 50 +++++++++---------- .../main_utils/online_entries/common_entry.py | 7 ++- 5 files changed, 39 insertions(+), 36 deletions(-) diff --git a/source/lambda/online/common_logic/common_utils/constant.py b/source/lambda/online/common_logic/common_utils/constant.py index 518d35daf..68b975446 100644 --- a/source/lambda/online/common_logic/common_utils/constant.py +++ b/source/lambda/online/common_logic/common_utils/constant.py @@ -171,3 +171,7 @@ class IndexTag(Enum): @unique class KBType(Enum): AOS = "aos" + + +GUIDE_INTENTION_NOT_FOUND = "Intention not found, please add intentions first when using agent mode" + diff --git a/source/lambda/online/common_logic/common_utils/monitor_utils.py b/source/lambda/online/common_logic/common_utils/monitor_utils.py index c028c8017..9a0941d0b 100644 --- a/source/lambda/online/common_logic/common_utils/monitor_utils.py +++ 
b/source/lambda/online/common_logic/common_utils/monitor_utils.py @@ -28,12 +28,11 @@ def format_qq_data(data) -> str: markdown_table += "| Source | Score | Question | Answer |\n" markdown_table += "|-----|-----|-----|-----|\n" - for qq_item in data: - qq_source = _generate_markdown_link(qq_item.get("source", "")) - qq_score = qq_item.get("score", -1) - qq_question = qq_item.get("page_content", "").replace("\n", "
") - qq_answer = qq_item.get("answer", "").replace("\n", "
") - markdown_table += f"| {qq_source} | {qq_score} | {qq_question} | {qq_answer} |\n" + qq_source = _generate_markdown_link(data.get("source", "")) + qq_score = data.get("score", -1) + qq_question = data.get("page_content", "").replace("\n", "
") + qq_answer = data.get("answer", "").replace("\n", "
") + markdown_table += f"| {qq_source} | {qq_score} | {qq_question} | {qq_answer} |\n" return markdown_table diff --git a/source/lambda/online/functions/lambda_common_tools/rag.py b/source/lambda/online/functions/lambda_common_tools/rag.py index 01170fabd..3ce4622ef 100644 --- a/source/lambda/online/functions/lambda_common_tools/rag.py +++ b/source/lambda/online/functions/lambda_common_tools/rag.py @@ -9,7 +9,6 @@ def lambda_handler(event_body, context=None): state = event_body["state"] - print(event_body) context_list = [] # Add qq match results context_list.extend(state["qq_match_results"]) @@ -23,8 +22,6 @@ def lambda_handler(event_body, context=None): lambda_module_path="functions.functions_utils.retriever.retriever", handler_name="lambda_handler", ) - print("RAG debug") - print(output) for doc in output["result"]["docs"]: context_list.append(doc["page_content"]) diff --git a/source/lambda/online/lambda_intention_detection/intention.py b/source/lambda/online/lambda_intention_detection/intention.py index d0d32a14d..fbc72fbc6 100644 --- a/source/lambda/online/lambda_intention_detection/intention.py +++ b/source/lambda/online/lambda_intention_detection/intention.py @@ -26,8 +26,6 @@ def get_intention_results(query: str, intention_config: dict): **intention_config } - logger.info("intention event body") - logger.info(event_body) # call retriver res:list[dict] = invoke_lambda( lambda_name="Online_Functions", @@ -40,31 +38,31 @@ def get_intention_results(query: str, intention_config: dict): # Return to guide the user to add intentions return [], False - # add default intention - current_path = pathlib.Path(__file__).parent.resolve() - try: - with open(f"{current_path}/intention_utils/default_intent.jsonl", "r") as json_file: - json_list = list(json_file) - except FileNotFoundError: - logger.error(f"File note found: {current_path}/intention_utils/default_intent.jsonl") - json_list = [] + # # add default intention + # current_path = 
pathlib.Path(__file__).parent.resolve() + # try: + # with open(f"{current_path}/intention_utils/default_intent.jsonl", "r") as json_file: + # json_list = list(json_file) + # except FileNotFoundError: + # logger.error(f"File note found: {current_path}/intention_utils/default_intent.jsonl") + # json_list = [] - intent_fewshot_examples = [] - for json_str in json_list: - try: - intent_result = json.loads(json_str) - except json.JSONDecodeError as e: - logger.error(f"Error decoding JSON: {e}") - intent_result = {} - question = intent_result.get("question","你好") - answer = intent_result.get("answer",{}) - intent_fewshot_examples.append({ - "query": question, - "score": "n/a", - "name": answer.get("intent","chat"), - "intent": answer.get("intent","chat"), - "kwargs": answer.get("kwargs", {}), - }) + # intent_fewshot_examples = [] + # for json_str in json_list: + # try: + # intent_result = json.loads(json_str) + # except json.JSONDecodeError as e: + # logger.error(f"Error decoding JSON: {e}") + # intent_result = {} + # question = intent_result.get("question","你好") + # answer = intent_result.get("answer",{}) + # intent_fewshot_examples.append({ + # "query": question, + # "score": "n/a", + # "name": answer.get("intent","chat"), + # "intent": answer.get("intent","chat"), + # "kwargs": answer.get("kwargs", {}), + # }) else: intent_fewshot_examples = [] for doc in res["result"]["docs"]: diff --git a/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py b/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py index 2d6ebab69..fa58b4405 100644 --- a/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py +++ b/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py @@ -7,6 +7,7 @@ LLMTaskType, SceneType, ToolRuningMode, + GUIDE_INTENTION_NOT_FOUND, ) from common_logic.common_utils.lambda_invoke_utils import ( invoke_lambda, @@ -167,7 +168,10 @@ def intention_detection(state: ChatbotState): ) if not 
intention_ready: - return {"intent_type": "intention not ready"} + return { + "answer": GUIDE_INTENTION_NOT_FOUND, + "intent_type": "intention not ready", + } intent_fewshot_tools: list[str] = list( set([e["intent"] for e in intent_fewshot_examples]) @@ -369,6 +373,7 @@ def build_graph(chatbot_state_cls): # agent mode workflow.add_edge("tools_execution", "agent") workflow.add_edge("matched_query_return", "final_results_preparation") + workflow.add_edge("intention_not_ready", "final_results_preparation") workflow.add_edge("final_results_preparation", END) # add conditional edges From 89922bd059de5fbd0873910e6d5b18bce33cf055 Mon Sep 17 00:00:00 2001 From: NingLyu Date: Tue, 5 Nov 2024 04:38:15 +0000 Subject: [PATCH 055/110] fix: revert qq monitor --- .../online/common_logic/common_utils/monitor_utils.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/source/lambda/online/common_logic/common_utils/monitor_utils.py b/source/lambda/online/common_logic/common_utils/monitor_utils.py index 9a0941d0b..c028c8017 100644 --- a/source/lambda/online/common_logic/common_utils/monitor_utils.py +++ b/source/lambda/online/common_logic/common_utils/monitor_utils.py @@ -28,11 +28,12 @@ def format_qq_data(data) -> str: markdown_table += "| Source | Score | Question | Answer |\n" markdown_table += "|-----|-----|-----|-----|\n" - qq_source = _generate_markdown_link(data.get("source", "")) - qq_score = data.get("score", -1) - qq_question = data.get("page_content", "").replace("\n", "
") - qq_answer = data.get("answer", "").replace("\n", "
") - markdown_table += f"| {qq_source} | {qq_score} | {qq_question} | {qq_answer} |\n" + for qq_item in data: + qq_source = _generate_markdown_link(qq_item.get("source", "")) + qq_score = qq_item.get("score", -1) + qq_question = qq_item.get("page_content", "").replace("\n", "
") + qq_answer = qq_item.get("answer", "").replace("\n", "
") + markdown_table += f"| {qq_source} | {qq_score} | {qq_question} | {qq_answer} |\n" return markdown_table From d9b6851fc311b76c17f99c3c8d577f5eaf3e6fe3 Mon Sep 17 00:00:00 2001 From: zhouxss Date: Tue, 5 Nov 2024 05:46:20 +0000 Subject: [PATCH 056/110] move functions to __functions --- source/lambda/online/{functions => __functions}/__init__.py | 0 source/lambda/online/{functions => __functions}/_tool_base.py | 0 .../functions_utils/retriever/retriever.py | 0 .../functions_utils/retriever/utils/aos_retrievers.py | 0 .../functions_utils/retriever/utils/aos_utils.py | 0 .../functions_utils/retriever/utils/context_utils.py | 0 .../functions_utils/retriever/utils/reranker.py | 0 .../functions_utils/retriever/utils/test.py | 0 .../functions_utils/retriever/utils/websearch_retrievers.py | 0 .../{functions => __functions}/lambda_aws_qa_tools/__init__.py | 0 .../lambda_aws_qa_tools/aws_ec2_price.py | 0 .../lambda_aws_qa_tools/check_service_availability.py | 0 .../{functions => __functions}/lambda_aws_qa_tools/comfort.py | 0 .../lambda_aws_qa_tools/explain_abbr.py | 0 .../{functions => __functions}/lambda_aws_qa_tools/service_org.py | 0 .../{functions => __functions}/lambda_aws_qa_tools/transfer.py | 0 .../{functions => __functions}/lambda_common_tools/__init__.py | 0 .../online/{functions => __functions}/lambda_common_tools/chat.py | 0 .../lambda_common_tools/comparison_rag.py | 0 .../{functions => __functions}/lambda_common_tools/get_weather.py | 0 .../lambda_common_tools/give_final_response.py | 0 .../lambda_common_tools/give_rhetorical_question.py | 0 .../online/{functions => __functions}/lambda_common_tools/rag.py | 0 .../lambda_common_tools/step_back_rag.py | 0 .../{functions => __functions}/lambda_retail_tools/__init__.py | 0 .../lambda_retail_tools/customer_complain.py | 0 .../lambda_retail_tools/daily_reception.py | 0 .../lambda_retail_tools/goods_exchange.py | 0 .../{functions => __functions}/lambda_retail_tools/order_info.py | 0 
.../lambda_retail_tools/product_aftersales.py | 0 .../lambda_retail_tools/product_information_search.py | 0 .../{functions => __functions}/lambda_retail_tools/promotion.py | 0 .../lambda_retail_tools/rule_response.py | 0 .../{functions => __functions}/lambda_retail_tools/size_guide.py | 0 .../{functions => __functions}/lambda_retail_tools/transfer.py | 0 source/lambda/online/{functions => __functions}/lambda_tool.py | 0 .../online/{functions => __functions}/tool_calling_parse.py | 0 .../{functions => __functions}/tool_execute_result_format.py | 0 38 files changed, 0 insertions(+), 0 deletions(-) rename source/lambda/online/{functions => __functions}/__init__.py (100%) rename source/lambda/online/{functions => __functions}/_tool_base.py (100%) rename source/lambda/online/{functions => __functions}/functions_utils/retriever/retriever.py (100%) rename source/lambda/online/{functions => __functions}/functions_utils/retriever/utils/aos_retrievers.py (100%) rename source/lambda/online/{functions => __functions}/functions_utils/retriever/utils/aos_utils.py (100%) rename source/lambda/online/{functions => __functions}/functions_utils/retriever/utils/context_utils.py (100%) rename source/lambda/online/{functions => __functions}/functions_utils/retriever/utils/reranker.py (100%) rename source/lambda/online/{functions => __functions}/functions_utils/retriever/utils/test.py (100%) rename source/lambda/online/{functions => __functions}/functions_utils/retriever/utils/websearch_retrievers.py (100%) rename source/lambda/online/{functions => __functions}/lambda_aws_qa_tools/__init__.py (100%) rename source/lambda/online/{functions => __functions}/lambda_aws_qa_tools/aws_ec2_price.py (100%) rename source/lambda/online/{functions => __functions}/lambda_aws_qa_tools/check_service_availability.py (100%) rename source/lambda/online/{functions => __functions}/lambda_aws_qa_tools/comfort.py (100%) rename source/lambda/online/{functions => __functions}/lambda_aws_qa_tools/explain_abbr.py 
(100%) rename source/lambda/online/{functions => __functions}/lambda_aws_qa_tools/service_org.py (100%) rename source/lambda/online/{functions => __functions}/lambda_aws_qa_tools/transfer.py (100%) rename source/lambda/online/{functions => __functions}/lambda_common_tools/__init__.py (100%) rename source/lambda/online/{functions => __functions}/lambda_common_tools/chat.py (100%) rename source/lambda/online/{functions => __functions}/lambda_common_tools/comparison_rag.py (100%) rename source/lambda/online/{functions => __functions}/lambda_common_tools/get_weather.py (100%) rename source/lambda/online/{functions => __functions}/lambda_common_tools/give_final_response.py (100%) rename source/lambda/online/{functions => __functions}/lambda_common_tools/give_rhetorical_question.py (100%) rename source/lambda/online/{functions => __functions}/lambda_common_tools/rag.py (100%) rename source/lambda/online/{functions => __functions}/lambda_common_tools/step_back_rag.py (100%) rename source/lambda/online/{functions => __functions}/lambda_retail_tools/__init__.py (100%) rename source/lambda/online/{functions => __functions}/lambda_retail_tools/customer_complain.py (100%) rename source/lambda/online/{functions => __functions}/lambda_retail_tools/daily_reception.py (100%) rename source/lambda/online/{functions => __functions}/lambda_retail_tools/goods_exchange.py (100%) rename source/lambda/online/{functions => __functions}/lambda_retail_tools/order_info.py (100%) rename source/lambda/online/{functions => __functions}/lambda_retail_tools/product_aftersales.py (100%) rename source/lambda/online/{functions => __functions}/lambda_retail_tools/product_information_search.py (100%) rename source/lambda/online/{functions => __functions}/lambda_retail_tools/promotion.py (100%) rename source/lambda/online/{functions => __functions}/lambda_retail_tools/rule_response.py (100%) rename source/lambda/online/{functions => __functions}/lambda_retail_tools/size_guide.py (100%) rename 
source/lambda/online/{functions => __functions}/lambda_retail_tools/transfer.py (100%) rename source/lambda/online/{functions => __functions}/lambda_tool.py (100%) rename source/lambda/online/{functions => __functions}/tool_calling_parse.py (100%) rename source/lambda/online/{functions => __functions}/tool_execute_result_format.py (100%) diff --git a/source/lambda/online/functions/__init__.py b/source/lambda/online/__functions/__init__.py similarity index 100% rename from source/lambda/online/functions/__init__.py rename to source/lambda/online/__functions/__init__.py diff --git a/source/lambda/online/functions/_tool_base.py b/source/lambda/online/__functions/_tool_base.py similarity index 100% rename from source/lambda/online/functions/_tool_base.py rename to source/lambda/online/__functions/_tool_base.py diff --git a/source/lambda/online/functions/functions_utils/retriever/retriever.py b/source/lambda/online/__functions/functions_utils/retriever/retriever.py similarity index 100% rename from source/lambda/online/functions/functions_utils/retriever/retriever.py rename to source/lambda/online/__functions/functions_utils/retriever/retriever.py diff --git a/source/lambda/online/functions/functions_utils/retriever/utils/aos_retrievers.py b/source/lambda/online/__functions/functions_utils/retriever/utils/aos_retrievers.py similarity index 100% rename from source/lambda/online/functions/functions_utils/retriever/utils/aos_retrievers.py rename to source/lambda/online/__functions/functions_utils/retriever/utils/aos_retrievers.py diff --git a/source/lambda/online/functions/functions_utils/retriever/utils/aos_utils.py b/source/lambda/online/__functions/functions_utils/retriever/utils/aos_utils.py similarity index 100% rename from source/lambda/online/functions/functions_utils/retriever/utils/aos_utils.py rename to source/lambda/online/__functions/functions_utils/retriever/utils/aos_utils.py diff --git 
a/source/lambda/online/functions/functions_utils/retriever/utils/context_utils.py b/source/lambda/online/__functions/functions_utils/retriever/utils/context_utils.py similarity index 100% rename from source/lambda/online/functions/functions_utils/retriever/utils/context_utils.py rename to source/lambda/online/__functions/functions_utils/retriever/utils/context_utils.py diff --git a/source/lambda/online/functions/functions_utils/retriever/utils/reranker.py b/source/lambda/online/__functions/functions_utils/retriever/utils/reranker.py similarity index 100% rename from source/lambda/online/functions/functions_utils/retriever/utils/reranker.py rename to source/lambda/online/__functions/functions_utils/retriever/utils/reranker.py diff --git a/source/lambda/online/functions/functions_utils/retriever/utils/test.py b/source/lambda/online/__functions/functions_utils/retriever/utils/test.py similarity index 100% rename from source/lambda/online/functions/functions_utils/retriever/utils/test.py rename to source/lambda/online/__functions/functions_utils/retriever/utils/test.py diff --git a/source/lambda/online/functions/functions_utils/retriever/utils/websearch_retrievers.py b/source/lambda/online/__functions/functions_utils/retriever/utils/websearch_retrievers.py similarity index 100% rename from source/lambda/online/functions/functions_utils/retriever/utils/websearch_retrievers.py rename to source/lambda/online/__functions/functions_utils/retriever/utils/websearch_retrievers.py diff --git a/source/lambda/online/functions/lambda_aws_qa_tools/__init__.py b/source/lambda/online/__functions/lambda_aws_qa_tools/__init__.py similarity index 100% rename from source/lambda/online/functions/lambda_aws_qa_tools/__init__.py rename to source/lambda/online/__functions/lambda_aws_qa_tools/__init__.py diff --git a/source/lambda/online/functions/lambda_aws_qa_tools/aws_ec2_price.py b/source/lambda/online/__functions/lambda_aws_qa_tools/aws_ec2_price.py similarity index 100% rename from 
source/lambda/online/functions/lambda_aws_qa_tools/aws_ec2_price.py rename to source/lambda/online/__functions/lambda_aws_qa_tools/aws_ec2_price.py diff --git a/source/lambda/online/functions/lambda_aws_qa_tools/check_service_availability.py b/source/lambda/online/__functions/lambda_aws_qa_tools/check_service_availability.py similarity index 100% rename from source/lambda/online/functions/lambda_aws_qa_tools/check_service_availability.py rename to source/lambda/online/__functions/lambda_aws_qa_tools/check_service_availability.py diff --git a/source/lambda/online/functions/lambda_aws_qa_tools/comfort.py b/source/lambda/online/__functions/lambda_aws_qa_tools/comfort.py similarity index 100% rename from source/lambda/online/functions/lambda_aws_qa_tools/comfort.py rename to source/lambda/online/__functions/lambda_aws_qa_tools/comfort.py diff --git a/source/lambda/online/functions/lambda_aws_qa_tools/explain_abbr.py b/source/lambda/online/__functions/lambda_aws_qa_tools/explain_abbr.py similarity index 100% rename from source/lambda/online/functions/lambda_aws_qa_tools/explain_abbr.py rename to source/lambda/online/__functions/lambda_aws_qa_tools/explain_abbr.py diff --git a/source/lambda/online/functions/lambda_aws_qa_tools/service_org.py b/source/lambda/online/__functions/lambda_aws_qa_tools/service_org.py similarity index 100% rename from source/lambda/online/functions/lambda_aws_qa_tools/service_org.py rename to source/lambda/online/__functions/lambda_aws_qa_tools/service_org.py diff --git a/source/lambda/online/functions/lambda_aws_qa_tools/transfer.py b/source/lambda/online/__functions/lambda_aws_qa_tools/transfer.py similarity index 100% rename from source/lambda/online/functions/lambda_aws_qa_tools/transfer.py rename to source/lambda/online/__functions/lambda_aws_qa_tools/transfer.py diff --git a/source/lambda/online/functions/lambda_common_tools/__init__.py b/source/lambda/online/__functions/lambda_common_tools/__init__.py similarity index 100% rename from 
source/lambda/online/functions/lambda_common_tools/__init__.py rename to source/lambda/online/__functions/lambda_common_tools/__init__.py diff --git a/source/lambda/online/functions/lambda_common_tools/chat.py b/source/lambda/online/__functions/lambda_common_tools/chat.py similarity index 100% rename from source/lambda/online/functions/lambda_common_tools/chat.py rename to source/lambda/online/__functions/lambda_common_tools/chat.py diff --git a/source/lambda/online/functions/lambda_common_tools/comparison_rag.py b/source/lambda/online/__functions/lambda_common_tools/comparison_rag.py similarity index 100% rename from source/lambda/online/functions/lambda_common_tools/comparison_rag.py rename to source/lambda/online/__functions/lambda_common_tools/comparison_rag.py diff --git a/source/lambda/online/functions/lambda_common_tools/get_weather.py b/source/lambda/online/__functions/lambda_common_tools/get_weather.py similarity index 100% rename from source/lambda/online/functions/lambda_common_tools/get_weather.py rename to source/lambda/online/__functions/lambda_common_tools/get_weather.py diff --git a/source/lambda/online/functions/lambda_common_tools/give_final_response.py b/source/lambda/online/__functions/lambda_common_tools/give_final_response.py similarity index 100% rename from source/lambda/online/functions/lambda_common_tools/give_final_response.py rename to source/lambda/online/__functions/lambda_common_tools/give_final_response.py diff --git a/source/lambda/online/functions/lambda_common_tools/give_rhetorical_question.py b/source/lambda/online/__functions/lambda_common_tools/give_rhetorical_question.py similarity index 100% rename from source/lambda/online/functions/lambda_common_tools/give_rhetorical_question.py rename to source/lambda/online/__functions/lambda_common_tools/give_rhetorical_question.py diff --git a/source/lambda/online/functions/lambda_common_tools/rag.py b/source/lambda/online/__functions/lambda_common_tools/rag.py similarity index 100% 
rename from source/lambda/online/functions/lambda_common_tools/rag.py rename to source/lambda/online/__functions/lambda_common_tools/rag.py diff --git a/source/lambda/online/functions/lambda_common_tools/step_back_rag.py b/source/lambda/online/__functions/lambda_common_tools/step_back_rag.py similarity index 100% rename from source/lambda/online/functions/lambda_common_tools/step_back_rag.py rename to source/lambda/online/__functions/lambda_common_tools/step_back_rag.py diff --git a/source/lambda/online/functions/lambda_retail_tools/__init__.py b/source/lambda/online/__functions/lambda_retail_tools/__init__.py similarity index 100% rename from source/lambda/online/functions/lambda_retail_tools/__init__.py rename to source/lambda/online/__functions/lambda_retail_tools/__init__.py diff --git a/source/lambda/online/functions/lambda_retail_tools/customer_complain.py b/source/lambda/online/__functions/lambda_retail_tools/customer_complain.py similarity index 100% rename from source/lambda/online/functions/lambda_retail_tools/customer_complain.py rename to source/lambda/online/__functions/lambda_retail_tools/customer_complain.py diff --git a/source/lambda/online/functions/lambda_retail_tools/daily_reception.py b/source/lambda/online/__functions/lambda_retail_tools/daily_reception.py similarity index 100% rename from source/lambda/online/functions/lambda_retail_tools/daily_reception.py rename to source/lambda/online/__functions/lambda_retail_tools/daily_reception.py diff --git a/source/lambda/online/functions/lambda_retail_tools/goods_exchange.py b/source/lambda/online/__functions/lambda_retail_tools/goods_exchange.py similarity index 100% rename from source/lambda/online/functions/lambda_retail_tools/goods_exchange.py rename to source/lambda/online/__functions/lambda_retail_tools/goods_exchange.py diff --git a/source/lambda/online/functions/lambda_retail_tools/order_info.py b/source/lambda/online/__functions/lambda_retail_tools/order_info.py similarity index 100% rename 
from source/lambda/online/functions/lambda_retail_tools/order_info.py rename to source/lambda/online/__functions/lambda_retail_tools/order_info.py diff --git a/source/lambda/online/functions/lambda_retail_tools/product_aftersales.py b/source/lambda/online/__functions/lambda_retail_tools/product_aftersales.py similarity index 100% rename from source/lambda/online/functions/lambda_retail_tools/product_aftersales.py rename to source/lambda/online/__functions/lambda_retail_tools/product_aftersales.py diff --git a/source/lambda/online/functions/lambda_retail_tools/product_information_search.py b/source/lambda/online/__functions/lambda_retail_tools/product_information_search.py similarity index 100% rename from source/lambda/online/functions/lambda_retail_tools/product_information_search.py rename to source/lambda/online/__functions/lambda_retail_tools/product_information_search.py diff --git a/source/lambda/online/functions/lambda_retail_tools/promotion.py b/source/lambda/online/__functions/lambda_retail_tools/promotion.py similarity index 100% rename from source/lambda/online/functions/lambda_retail_tools/promotion.py rename to source/lambda/online/__functions/lambda_retail_tools/promotion.py diff --git a/source/lambda/online/functions/lambda_retail_tools/rule_response.py b/source/lambda/online/__functions/lambda_retail_tools/rule_response.py similarity index 100% rename from source/lambda/online/functions/lambda_retail_tools/rule_response.py rename to source/lambda/online/__functions/lambda_retail_tools/rule_response.py diff --git a/source/lambda/online/functions/lambda_retail_tools/size_guide.py b/source/lambda/online/__functions/lambda_retail_tools/size_guide.py similarity index 100% rename from source/lambda/online/functions/lambda_retail_tools/size_guide.py rename to source/lambda/online/__functions/lambda_retail_tools/size_guide.py diff --git a/source/lambda/online/functions/lambda_retail_tools/transfer.py 
b/source/lambda/online/__functions/lambda_retail_tools/transfer.py similarity index 100% rename from source/lambda/online/functions/lambda_retail_tools/transfer.py rename to source/lambda/online/__functions/lambda_retail_tools/transfer.py diff --git a/source/lambda/online/functions/lambda_tool.py b/source/lambda/online/__functions/lambda_tool.py similarity index 100% rename from source/lambda/online/functions/lambda_tool.py rename to source/lambda/online/__functions/lambda_tool.py diff --git a/source/lambda/online/functions/tool_calling_parse.py b/source/lambda/online/__functions/tool_calling_parse.py similarity index 100% rename from source/lambda/online/functions/tool_calling_parse.py rename to source/lambda/online/__functions/tool_calling_parse.py diff --git a/source/lambda/online/functions/tool_execute_result_format.py b/source/lambda/online/__functions/tool_execute_result_format.py similarity index 100% rename from source/lambda/online/functions/tool_execute_result_format.py rename to source/lambda/online/__functions/tool_execute_result_format.py From 92243ae132632b065da5b850633547502ed02f1a Mon Sep 17 00:00:00 2001 From: NingLyu Date: Tue, 5 Nov 2024 06:06:47 +0000 Subject: [PATCH 057/110] chore: add link --- source/lambda/online/common_logic/common_utils/constant.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/source/lambda/online/common_logic/common_utils/constant.py b/source/lambda/online/common_logic/common_utils/constant.py index 68b975446..37b4d7451 100644 --- a/source/lambda/online/common_logic/common_utils/constant.py +++ b/source/lambda/online/common_logic/common_utils/constant.py @@ -173,5 +173,5 @@ class KBType(Enum): AOS = "aos" -GUIDE_INTENTION_NOT_FOUND = "Intention not found, please add intentions first when using agent mode" +GUIDE_INTENTION_NOT_FOUND = "Intention not found, please add intentions first when using agent mode, refer to https://amzn-chn.feishu.cn/docx/HlxvduJYgoOz8CxITxXc43XWn8e" From 
3e786963421737fb03f3c1d7b4dbb6f4b4878af2 Mon Sep 17 00:00:00 2001 From: NingLyu Date: Tue, 5 Nov 2024 06:58:45 +0000 Subject: [PATCH 058/110] chore: add error handle logic for monitor --- .../common_utils/monitor_utils.py | 43 ++++++++++--------- 1 file changed, 22 insertions(+), 21 deletions(-) diff --git a/source/lambda/online/common_logic/common_utils/monitor_utils.py b/source/lambda/online/common_logic/common_utils/monitor_utils.py index c028c8017..6f0f06624 100644 --- a/source/lambda/online/common_logic/common_utils/monitor_utils.py +++ b/source/lambda/online/common_logic/common_utils/monitor_utils.py @@ -5,6 +5,14 @@ logger.setLevel(logging.INFO) +def is_null_or_empty(value): + if value is None: + return True + elif isinstance(value, (dict, list, str)) and not value: + return True + return False + + def _generate_markdown_link(file_path: str) -> str: file_name = file_path.split("/")[-1] markdown_link = f"[{file_name}]({file_path})" @@ -21,7 +29,7 @@ def format_qq_data(data) -> str: Returns: str: A markdown table string representing the formatted data. """ - if data is None or len(data) == 0: + if is_null_or_empty(data): return "" markdown_table = "**QQ Match Result**\n" @@ -49,7 +57,7 @@ def format_rag_data(data, qq_result) -> str: Returns: str: A markdown table string representing the formatted data. """ - if data is None or len(data) == 0: + if is_null_or_empty(data): return "" markdown_table = "| Source File Name | Source URI | Score | RAG Context |\n" @@ -61,29 +69,22 @@ def format_rag_data(data, qq_result) -> str: page_content = item.get("page_content", "").replace("\n", "
") markdown_table += f"| {source} | {raw_source} | {score} | {page_content} |\n\n" - markdown_table += "**QQ Match Result**\n" - markdown_table += "| Source File Name | Source URI | Score | Question | Answer |\n" - markdown_table += "|-----|-----|-----|-----|-----|\n" - - for qq_item in qq_result: - raw_qq_source = qq_item.get("source", "") - qq_source = _generate_markdown_link(raw_qq_source) - qq_score = qq_item.get("score", -1) - qq_question = qq_item.get("page_content", "").replace("\n", "
") - qq_answer = qq_item.get("answer", "").replace("\n", "
") - markdown_table += f"| {qq_source} | {raw_qq_source} | {qq_score} | {qq_question} | {qq_answer} |\n" + if not is_null_or_empty(qq_result): + markdown_table += "**QQ Match Result**\n" + markdown_table += "| Source File Name | Source URI | Score | Question | Answer |\n" + markdown_table += "|-----|-----|-----|-----|-----|\n" + + for qq_item in qq_result: + raw_qq_source = qq_item.get("source", "") + qq_source = _generate_markdown_link(raw_qq_source) + qq_score = qq_item.get("score", -1) + qq_question = qq_item.get("page_content", "").replace("\n", "
") + qq_answer = qq_item.get("answer", "").replace("\n", "
") + markdown_table += f"| {qq_source} | {raw_qq_source} | {qq_score} | {qq_question} | {qq_answer} |\n" return markdown_table -def is_null_or_empty(value): - if value is None: - return True - elif isinstance(value, (dict, list, str)) and not value: - return True - return False - - def format_preprocess_output(ori_query, rewrite_query): if is_null_or_empty(ori_query) or is_null_or_empty(rewrite_query): return "" From 6e80cf0aac0e31131a51f3d018a83cb526bcb9c5 Mon Sep 17 00:00:00 2001 From: NingLyu Date: Tue, 5 Nov 2024 07:19:48 +0000 Subject: [PATCH 059/110] fix: update qq monitor --- .../common_logic/common_utils/monitor_utils.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/source/lambda/online/common_logic/common_utils/monitor_utils.py b/source/lambda/online/common_logic/common_utils/monitor_utils.py index 6f0f06624..be1574210 100644 --- a/source/lambda/online/common_logic/common_utils/monitor_utils.py +++ b/source/lambda/online/common_logic/common_utils/monitor_utils.py @@ -36,12 +36,12 @@ def format_qq_data(data) -> str: markdown_table += "| Source | Score | Question | Answer |\n" markdown_table += "|-----|-----|-----|-----|\n" - for qq_item in data: - qq_source = _generate_markdown_link(qq_item.get("source", "")) - qq_score = qq_item.get("score", -1) - qq_question = qq_item.get("page_content", "").replace("\n", "
") - qq_answer = qq_item.get("answer", "").replace("\n", "
") - markdown_table += f"| {qq_source} | {qq_score} | {qq_question} | {qq_answer} |\n" + # Data contains only one QQ match result + qq_source = _generate_markdown_link(data.get("source", "")) + qq_score = data.get("score", -1) + qq_question = data.get("page_content", "").replace("\n", "
") + qq_answer = data.get("answer", "").replace("\n", "
") + markdown_table += f"| {qq_source} | {qq_score} | {qq_question} | {qq_answer} |\n" return markdown_table From 9c82d96c8b9516df3ba0298fdcccd144d819d00e Mon Sep 17 00:00:00 2001 From: Xu Han Date: Wed, 6 Nov 2024 03:15:39 +0000 Subject: [PATCH 060/110] chore: optimize constants and delete object from knowledge base --- .../knowledge-base/knowledge-base-stack.ts | 1 + source/lambda/etl/constant.py | 22 ++++++++-- source/lambda/etl/delete_execution.py | 5 ++- source/lambda/etl/notification.py | 40 +++++++++++++------ source/lambda/etl/sfn_handler.py | 34 +++++----------- source/lambda/etl/utils/ddb_utils.py | 28 +++++-------- 6 files changed, 71 insertions(+), 59 deletions(-) diff --git a/source/infrastructure/lib/knowledge-base/knowledge-base-stack.ts b/source/infrastructure/lib/knowledge-base/knowledge-base-stack.ts index 5f1c724bb..52812678a 100644 --- a/source/infrastructure/lib/knowledge-base/knowledge-base-stack.ts +++ b/source/infrastructure/lib/knowledge-base/knowledge-base-stack.ts @@ -367,6 +367,7 @@ export class KnowledgeBaseStack extends NestedStack implements KnowledgeBaseStac message: sfn.TaskInput.fromObject({ "executionId.$": "$.tableItemId", "mapResults.$": "$.mapResults", + "operationType.$": "$.operationType", }), }); diff --git a/source/lambda/etl/constant.py b/source/lambda/etl/constant.py index 4b1278492..b97a7135b 100644 --- a/source/lambda/etl/constant.py +++ b/source/lambda/etl/constant.py @@ -11,9 +11,17 @@ class KBType(Enum): @unique -class Status(Enum): - ACTIVE = "active" - INACTIVE = "inactive" +class UiStatus(Enum): + ACTIVE = "ACTIVE" + INACTIVE = "INACTIVE" + + +@unique +class ExecutionStatus(Enum): + IN_PROGRESS = "IN-PROGRESS" + COMPLETED = "COMPLETED" + DELETING = "DELETING" + DELETED = "DELETED" class EmbeddingModelType(Enum): @@ -36,3 +44,11 @@ class ModelType(Enum): @unique class IndexTag(Enum): COMMON = "common" + + +@unique +class OperationType(Enum): + CREATE = "create" + UPDATE = "update" + DELETE = "delete" + EXTRACT_ONLY 
= "extract_only" diff --git a/source/lambda/etl/delete_execution.py b/source/lambda/etl/delete_execution.py index 5ba6f94e0..8ead1bde4 100644 --- a/source/lambda/etl/delete_execution.py +++ b/source/lambda/etl/delete_execution.py @@ -11,6 +11,7 @@ import os import boto3 +from constant import ExecutionStatus, OperationType, UiStatus # Set up logging logger = logging.getLogger() @@ -78,7 +79,7 @@ def delete_execution_pipeline(execution_id): raise Exception(f"Execution {execution_id} not found") # Update execution item status - update_execution_item(execution_id, "DELETED", "INACTIVE") + update_execution_item(execution_id, ExecutionStatus.DELETING.value, UiStatus.ACTIVE.value) # Prepare input for Step Function to delete document from OpenSearch delete_document_sfn_input = { @@ -86,7 +87,7 @@ def delete_execution_pipeline(execution_id): "s3Prefix": execution_item["s3Prefix"], "chatbotId": execution_item["chatbotId"], "indexType": execution_item["indexType"], - "operationType": "delete", + "operationType": OperationType.DELETE.value, "indexId": execution_item["indexId"], "groupName": execution_item["groupName"], "tableItemId": execution_item["executionId"], diff --git a/source/lambda/etl/notification.py b/source/lambda/etl/notification.py index 280e8f1db..f92066c2f 100644 --- a/source/lambda/etl/notification.py +++ b/source/lambda/etl/notification.py @@ -3,6 +3,7 @@ import os import boto3 +from constant import ExecutionStatus, OperationType, UiStatus logger = logging.getLogger() logger.setLevel(logging.INFO) @@ -10,23 +11,38 @@ execution_table = dynamodb.Table(os.environ.get("EXECUTION_TABLE")) -def lambda_handler(event, context): - logger.info(f"event:{event}") - if len(event["Records"]) != 1: - raise ValueError( - f"Record is not valid, it should only has 1 item, {event}") +def update_execution_item(execution_id, execution_status, ui_status): + """ + Update the status of an execution item in DynamoDB. 
- message = json.loads(event["Records"][0]["Sns"]["Message"]) - execution_id = message["executionId"] - status = "COMPLETED" + Args: + execution_id (str): The ID of the execution to update. + execution_status (str): The new execution status. + ui_status (str): The new UI status. + Returns: + dict: The response from the DynamoDB update operation. + """ response = execution_table.update_item( Key={"executionId": execution_id}, - UpdateExpression="SET executionStatus = :val", - ExpressionAttributeValues={ - ":val": status - }, + UpdateExpression="SET executionStatus = :execution_status, uiStatus = :ui_status", + ExpressionAttributeValues={":execution_status": execution_status, ":ui_status": ui_status}, ReturnValues="UPDATED_NEW", ) + return response + + +def lambda_handler(event, context): + logger.info(f"event:{event}") + if len(event["Records"]) != 1: + raise ValueError(f"Record is not valid, it should only has 1 item, {event}") + + message = json.loads(event["Records"][0]["Sns"]["Message"]) + execution_id = message["executionId"] + operation_type = message["operationType"] + if operation_type == OperationType.DELETE.value: + update_execution_item(execution_id, ExecutionStatus.DELETED.value, UiStatus.INACTIVE.value) + else: + update_execution_item(execution_id, ExecutionStatus.COMPLETED.value, UiStatus.ACTIVE.value) logger.info(f"DynamoDB update: {response}") diff --git a/source/lambda/etl/sfn_handler.py b/source/lambda/etl/sfn_handler.py index b40627610..c39994b18 100644 --- a/source/lambda/etl/sfn_handler.py +++ b/source/lambda/etl/sfn_handler.py @@ -2,13 +2,11 @@ import logging import os from datetime import datetime, timezone -from urllib.parse import unquote_plus -from utils.parameter_utils import get_query_parameter -from chatbot_management import create_chatbot -import boto3 -from constant import IndexTag, IndexType -from utils.ddb_utils import initiate_chatbot, initiate_index, initiate_model +import boto3 +from chatbot_management import create_chatbot +from 
constant import ExecutionStatus, IndexType, UiStatus +from utils.parameter_utils import get_query_parameter client = boto3.client("stepfunctions") dynamodb = boto3.resource("dynamodb") @@ -36,9 +34,7 @@ def handler(event, context): "Access-Control-Allow-Methods": "*", } - authorizer_type = ( - event["requestContext"].get("authorizer", {}).get("authorizerType") - ) + authorizer_type = event["requestContext"].get("authorizer", {}).get("authorizerType") if authorizer_type == "lambda_authorizer": claims = json.loads(event["requestContext"]["authorizer"]["claims"]) if "use_api_key" in claims: @@ -72,9 +68,7 @@ def handler(event, context): ), } index_type = input_body["indexType"] - group_name = ( - "Admin" if "Admin" in cognito_groups_list else cognito_groups_list[0] - ) + group_name = "Admin" if "Admin" in cognito_groups_list else cognito_groups_list[0] chatbot_id = input_body.get("chatbotId", group_name.lower()) if "indexId" in input_body: @@ -93,31 +87,25 @@ def handler(event, context): tag = index_id input_body["indexId"] = index_id - input_body["groupName"] = ( - group_name if "groupName" not in input_body else input_body["groupName"] - ) - chatbot_event = { - "body": json.dumps({"group_name": group_name}) - } + input_body["groupName"] = group_name if "groupName" not in input_body else input_body["groupName"] + chatbot_event = {"body": json.dumps({"group_name": group_name})} chatbot_result = create_chatbot(chatbot_event, group_name) input_body["tableItemId"] = context.aws_request_id input_body["chatbotId"] = chatbot_id input_body["embeddingModelType"] = chatbot_result["modelType"] input_payload = json.dumps(input_body) - response = client.start_execution( - stateMachineArn=sfn_arn, input=input_payload - ) + response = client.start_execution(stateMachineArn=sfn_arn, input=input_payload) # Update execution table item if "tableItemId" in input_body: del input_body["tableItemId"] execution_id = response["executionArn"].split(":")[-1] input_body["sfnExecutionId"] = 
execution_id - input_body["executionStatus"] = "IN-PROGRESS" + input_body["executionStatus"] = ExecutionStatus.IN_PROGRESS.value input_body["indexId"] = index_id input_body["executionId"] = context.aws_request_id - input_body["uiStatus"] = "ACTIVE" + input_body["uiStatus"] = UiStatus.ACTIVE.value input_body["createTime"] = create_time execution_table.put_item(Item=input_body) diff --git a/source/lambda/etl/utils/ddb_utils.py b/source/lambda/etl/utils/ddb_utils.py index dd1cc1e2b..f9cce332b 100644 --- a/source/lambda/etl/utils/ddb_utils.py +++ b/source/lambda/etl/utils/ddb_utils.py @@ -1,6 +1,6 @@ from datetime import datetime, timezone -from constant import IndexTag, IndexType, KBType, ModelType, Status +from constant import IndexType, KBType, ModelType, UiStatus from utils.embeddings import get_embedding_info @@ -31,12 +31,8 @@ def update_model(model_table, item_key, model_parameter): ) -def initiate_model( - model_table, group_name, model_id, embedding_endpoint, create_time=None -): - existing_item = item_exist( - model_table, {"groupName": group_name, "modelId": model_id} - ) +def initiate_model(model_table, group_name, model_id, embedding_endpoint, create_time=None): + existing_item = item_exist(model_table, {"groupName": group_name, "modelId": model_id}) embedding_info = get_embedding_info(embedding_endpoint) embedding_info["ModelEndpoint"] = embedding_endpoint if existing_item: @@ -61,7 +57,7 @@ def initiate_model( "parameter": embedding_info, "createTime": create_time, "updateTime": create_time, - "status": Status.ACTIVE.value, + "status": UiStatus.ACTIVE.value, }, ) return embedding_info["ModelType"] @@ -77,9 +73,7 @@ def initiate_index( create_time=None, description="", ): - existing_item = item_exist( - index_table, {"groupName": group_name, "indexId": index_id} - ) + existing_item = item_exist(index_table, {"groupName": group_name, "indexId": index_id}) if not existing_item: if not create_time: @@ -93,14 +87,12 @@ def initiate_index( "modelIds": 
{"embedding": model_id}, "tag": tag, "createTime": create_time, - "status": Status.ACTIVE.value, + "status": UiStatus.ACTIVE.value, } if index_type != IndexType.INTENTION.value: db_body["description"] = description - create_item( - index_table, {"groupName": group_name, "indexId": index_id}, db_body - ) + create_item(index_table, {"groupName": group_name, "indexId": index_id}, db_body) def create_item_if_not_exist(ddb_table, item_key: dict, body: str): @@ -121,9 +113,7 @@ def initiate_chatbot( index_id_list, create_time=None, ): - existing_item = item_exist( - chatbot_table, {"groupName": group_name, "chatbotId": chatbot_id} - ) + existing_item = item_exist(chatbot_table, {"groupName": group_name, "chatbotId": chatbot_id}) if existing_item: chatbot_table.update_item( Key={"groupName": group_name, "chatbotId": chatbot_id}, @@ -159,7 +149,7 @@ def initiate_chatbot( }, "createTime": create_time, "updateTime": create_time, - "status": Status.ACTIVE.value, + "status": UiStatus.ACTIVE.value, }, ) From e9f200c804c04fb0429f8ce8ba2b99493df8251c Mon Sep 17 00:00:00 2001 From: Xu Han Date: Wed, 6 Nov 2024 03:16:02 +0000 Subject: [PATCH 061/110] feat: update front end for deleting document from knowledge base --- source/portal/src/locale/en.json | 1 + source/portal/src/locale/zh.json | 1 + source/portal/src/pages/library/Library.tsx | 6 +++--- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/source/portal/src/locale/en.json b/source/portal/src/locale/en.json index 099fbfb61..de48cde2f 100644 --- a/source/portal/src/locale/en.json +++ b/source/portal/src/locale/en.json @@ -33,6 +33,7 @@ "deleteSuccess": "Delete Successfully", "completed": "Completed", "inProgress": "In Progress", + "deleting": "Deleting", "failed": "Failed", "item": "item", "items": "items", diff --git a/source/portal/src/locale/zh.json b/source/portal/src/locale/zh.json index c2205f6d6..62cc5279d 100644 --- a/source/portal/src/locale/zh.json +++ b/source/portal/src/locale/zh.json @@ -33,6 +33,7 @@ 
"deleteSuccess": "删除成功", "completed": "完成", "inProgress": "进行中", + "deleting": "删除中", "failed": "失败", "item": "项目", "items": "项目", diff --git a/source/portal/src/pages/library/Library.tsx b/source/portal/src/pages/library/Library.tsx index 2d294e134..3e5762b75 100644 --- a/source/portal/src/pages/library/Library.tsx +++ b/source/portal/src/pages/library/Library.tsx @@ -108,9 +108,9 @@ const Library: React.FC = () => { if (status === 'COMPLETED') { return {t('completed')}; } else if (status === 'IN-PROGRESS') { - return ( - {t('inProgress')} - ); + return {t('inProgress')}; + } else if (status === 'DELETING') { + return {t('deleting')}; } else { return {t('failed')}; } From 4600d0bc55202e5d6c55da0a630e8004a6f8e629 Mon Sep 17 00:00:00 2001 From: NingLyu Date: Wed, 6 Nov 2024 03:55:46 +0000 Subject: [PATCH 062/110] fix: update monitor --- .../lambda/online/common_logic/common_utils/monitor_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/source/lambda/online/common_logic/common_utils/monitor_utils.py b/source/lambda/online/common_logic/common_utils/monitor_utils.py index be1574210..5d5974ab5 100644 --- a/source/lambda/online/common_logic/common_utils/monitor_utils.py +++ b/source/lambda/online/common_logic/common_utils/monitor_utils.py @@ -67,10 +67,10 @@ def format_rag_data(data, qq_result) -> str: source = _generate_markdown_link(raw_source) score = item.get("score", -1) page_content = item.get("page_content", "").replace("\n", "
") - markdown_table += f"| {source} | {raw_source} | {score} | {page_content} |\n\n" + markdown_table += f"| {source} | {raw_source} | {score} | {page_content} |\n" if not is_null_or_empty(qq_result): - markdown_table += "**QQ Match Result**\n" + markdown_table += "\n**QQ Match Result**\n" markdown_table += "| Source File Name | Source URI | Score | Question | Answer |\n" markdown_table += "|-----|-----|-----|-----|-----|\n" From cdb3e156df4b87818e8793f1895bdd48eb4aea8b Mon Sep 17 00:00:00 2001 From: zhouxss Date: Wed, 6 Nov 2024 08:21:31 +0000 Subject: [PATCH 063/110] add lambda tool test --- .../common_utils/lambda_invoke_utils.py | 17 +- .../common_utils/pydantic_models.py | 4 +- .../chains/chat_chain.py | 2 +- .../chains/conversation_summary_chain.py | 5 +- .../chains/tool_calling_chain_api.py | 7 - .../langgraph_integration.py | 12 -- .../retrievers/retriever.py | 11 +- .../langchain_integration/tools/__init__.py | 15 +- .../tools/common_tools/__init__.py | 14 +- .../tools/common_tools/rag.py | 32 ++-- source/lambda/online/lambda_main/main.py | 4 +- .../main_utils/online_entries/common_entry.py | 158 +++++++----------- .../test/main_local_test_common.py | 62 +++++-- 13 files changed, 154 insertions(+), 189 deletions(-) delete mode 100644 source/lambda/online/common_logic/langchain_integration/langgraph_integration.py diff --git a/source/lambda/online/common_logic/common_utils/lambda_invoke_utils.py b/source/lambda/online/common_logic/common_utils/lambda_invoke_utils.py index 6f90d9eec..923f3ddde 100644 --- a/source/lambda/online/common_logic/common_utils/lambda_invoke_utils.py +++ b/source/lambda/online/common_logic/common_utils/lambda_invoke_utils.py @@ -17,7 +17,9 @@ from .exceptions import LambdaInvokeError logger = get_logger("lambda_invoke_utils") +# thread_local = threading.local() thread_local = threading.local() +CURRENT_STATE = None __FUNC_NAME_MAP = { "query_preprocess": "Preprocess for Multi-round Conversation", @@ -37,17 +39,22 @@ def 
__init__(self,state): @classmethod def get_current_state(cls): - state = getattr(thread_local,'state',None) + # print("thread id",threading.get_ident(),'parent id',threading.) + # state = getattr(thread_local,'state',None) + state = CURRENT_STATE assert state is not None,"There is not a valid state in current context" return state @classmethod def set_current_state(cls, state): - setattr(thread_local, 'state', state) + global CURRENT_STATE + assert CURRENT_STATE is None, "Parallel node executions are not alowed" + CURRENT_STATE = state @classmethod def clear_state(cls): - setattr(thread_local, 'state', None) + global CURRENT_STATE + CURRENT_STATE = None def __enter__(self): self.set_current_state(self.state) @@ -125,8 +132,9 @@ def invoke_with_lambda(self, lambda_name: str, event_body: dict): ) response_body = invoke_response["Payload"] response_str = response_body.read().decode() - response_body = json.loads(response_str) + if "body" in response_body: + response_body = json.loads(response_body['body']) if "errorType" in response_body: error = ( @@ -136,7 +144,6 @@ def invoke_with_lambda(self, lambda_name: str, event_body: dict): + f"{response_body['errorType']}: {response_body['errorMessage']}" ) raise LambdaInvokeError(error) - return response_body def invoke_with_local( diff --git a/source/lambda/online/common_logic/common_utils/pydantic_models.py b/source/lambda/online/common_logic/common_utils/pydantic_models.py index bfb60bcc5..2cfc90f96 100644 --- a/source/lambda/online/common_logic/common_utils/pydantic_models.py +++ b/source/lambda/online/common_logic/common_utils/pydantic_models.py @@ -88,7 +88,7 @@ class RagToolConfig(AllowBaseModel): class AgentConfig(ForbidBaseModel): llm_config: LLMConfig = Field(default_factory=LLMConfig) - tools: list[str] = Field(default_factory=list) + tools: list[Union[str,dict]] = Field(default_factory=list) only_use_rag_tool: bool = False @@ -114,7 +114,7 @@ class ChatbotConfig(AllowBaseModel): private_knowledge_config: 
PrivateKnowledgeConfig = Field( default_factory=PrivateKnowledgeConfig ) - tools_config: dict[str, Any] = Field(default_factory=dict) + # tools_config: dict[str, Any] = Field(default_factory=dict) def update_llm_config(self, new_llm_config: dict): """unified update llm config diff --git a/source/lambda/online/common_logic/langchain_integration/chains/chat_chain.py b/source/lambda/online/common_logic/langchain_integration/chains/chat_chain.py index b51e342d4..434e592a9 100644 --- a/source/lambda/online/common_logic/langchain_integration/chains/chat_chain.py +++ b/source/lambda/online/common_logic/langchain_integration/chains/chat_chain.py @@ -313,7 +313,7 @@ class Qwen2Instruct72BChatChain(Qwen2Instruct7BChatChain): class ChatGPT35ChatChain(LLMChain): - model_id = LLMModelType.CHATGPT_35_TURBO + model_id = LLMModelType.CHATGPT_35_TURBO_0125 intent_type = LLMTaskType.CHAT @classmethod diff --git a/source/lambda/online/common_logic/langchain_integration/chains/conversation_summary_chain.py b/source/lambda/online/common_logic/langchain_integration/chains/conversation_summary_chain.py index 61b67598b..80ee00b26 100644 --- a/source/lambda/online/common_logic/langchain_integration/chains/conversation_summary_chain.py +++ b/source/lambda/online/common_logic/langchain_integration/chains/conversation_summary_chain.py @@ -2,8 +2,7 @@ from typing import List import json from langchain.schema.runnable import ( - RunnableLambda, - RunnablePassthrough, + RunnableLambda ) @@ -215,8 +214,6 @@ class CohereCommandRPlusConversationSummaryChain(Claude2ConversationSummaryChain - - class Qwen2Instruct72BConversationSummaryChain(Claude2ConversationSummaryChain): model_id = LLMModelType.QWEN2INSTRUCT72B diff --git a/source/lambda/online/common_logic/langchain_integration/chains/tool_calling_chain_api.py b/source/lambda/online/common_logic/langchain_integration/chains/tool_calling_chain_api.py index 35c7c0fa4..27e96c729 100644 --- 
a/source/lambda/online/common_logic/langchain_integration/chains/tool_calling_chain_api.py +++ b/source/lambda/online/common_logic/langchain_integration/chains/tool_calling_chain_api.py @@ -1,15 +1,9 @@ # tool calling chain import json from typing import List,Dict,Any -import re from collections import defaultdict -from langchain.schema.runnable import ( - RunnableLambda, - RunnablePassthrough -) from common_logic.common_utils.prompt_utils import get_prompt_template -from common_logic.common_utils.logger_utils import print_llm_messages from langchain_core.messages import( AIMessage, SystemMessage @@ -142,7 +136,6 @@ def create_chain(cls, model_kwargs=None, **kwargs): ] ) - # chain = RunnablePassthrough.assign(chat_history=lambda x: cls.create_chat_history(x)) | llm chain = tool_calling_template | llm return chain diff --git a/source/lambda/online/common_logic/langchain_integration/langgraph_integration.py b/source/lambda/online/common_logic/langchain_integration/langgraph_integration.py deleted file mode 100644 index 61b264e0a..000000000 --- a/source/lambda/online/common_logic/langchain_integration/langgraph_integration.py +++ /dev/null @@ -1,12 +0,0 @@ - -# set global langgraph app - -current_app = None - -def set_currrent_app(app): - global current_app - current_app = app - -def get_current_app(): - assert current_app is not None - return current_app \ No newline at end of file diff --git a/source/lambda/online/common_logic/langchain_integration/retrievers/retriever.py b/source/lambda/online/common_logic/langchain_integration/retrievers/retriever.py index ba411211a..d1c9884c8 100644 --- a/source/lambda/online/common_logic/langchain_integration/retrievers/retriever.py +++ b/source/lambda/online/common_logic/langchain_integration/retrievers/retriever.py @@ -6,20 +6,19 @@ import boto3 from common_logic.common_utils.chatbot_utils import ChatbotManager -from common_logic.common_utils.lambda_invoke_utils import chatbot_lambda_call_wrapper -from 
functions.functions_utils.retriever.utils.aos_retrievers import ( +from common_logic.langchain_integration.retrievers.utils.aos_retrievers import ( QueryDocumentBM25Retriever, QueryDocumentKNNRetriever, QueryQuestionRetriever, ) -from functions.functions_utils.retriever.utils.context_utils import ( +from common_logic.langchain_integration.retrievers.utils.context_utils import ( retriever_results_format, ) -from functions.functions_utils.retriever.utils.reranker import ( +from common_logic.langchain_integration.retrievers.utils.reranker import ( BGEReranker, MergeReranker, ) -from functions.functions_utils.retriever.utils.websearch_retrievers import ( +from common_logic.langchain_integration.retrievers.utils.websearch_retrievers import ( GoogleRetriever, ) from langchain.retrievers import ( @@ -125,8 +124,6 @@ def get_custom_retrievers(retriever): return retriever_dict[retriever["index_type"]](retriever) - - def lambda_handler(event, context=None): logger.info(f"Retrieval event: {event}") event_body = event diff --git a/source/lambda/online/common_logic/langchain_integration/tools/__init__.py b/source/lambda/online/common_logic/langchain_integration/tools/__init__.py index f25fd76e2..7a7047287 100644 --- a/source/lambda/online/common_logic/langchain_integration/tools/__init__.py +++ b/source/lambda/online/common_logic/langchain_integration/tools/__init__.py @@ -10,17 +10,14 @@ from datamodel_code_generator.model import get_data_model_types from datamodel_code_generator.parser.jsonschema import JsonSchemaParser from langchain.tools.base import StructuredTool as _StructuredTool ,BaseTool -# from langchain_core.pydantic_v1 import BaseModel from common_logic.common_utils.constant import SceneType from common_logic.common_utils.lambda_invoke_utils import invoke_with_lambda from functools import partial - class StructuredTool(_StructuredTool): - pass - # pass_state:bool = False # if pass state into tool invoke - # pass_state_name:str = "state" # pass state name + pass + 
class ToolIdentifier(BaseModel): scene: SceneType @@ -158,9 +155,7 @@ def register_common_rag_tool( scene=None, name=None, tool_identifier=None, - return_direct=False, - # pass_state=True, - # pass_state_name='state' + return_direct=False ): assert scene == SceneType.COMMON, scene from .common_tools.rag import rag_tool @@ -185,9 +180,7 @@ class Config: tool_def=RagModel ), description=description, - return_direct=return_direct, - # pass_state=pass_state, - # pass_state_name=pass_state_name + return_direct=return_direct ) return ToolManager.register_lc_tool( diff --git a/source/lambda/online/common_logic/langchain_integration/tools/common_tools/__init__.py b/source/lambda/online/common_logic/langchain_integration/tools/common_tools/__init__.py index 170daa44f..b3f5aa15f 100644 --- a/source/lambda/online/common_logic/langchain_integration/tools/common_tools/__init__.py +++ b/source/lambda/online/common_logic/langchain_integration/tools/common_tools/__init__.py @@ -102,8 +102,7 @@ def _load_rag_tool(tool_identifier:ToolIdentifier): "description": "query for retrieve", "type": "string" } - }, - # "required": ["query"] + } } ToolManager.register_func_as_tool( scene=tool_identifier.scene, @@ -114,7 +113,6 @@ def _load_rag_tool(tool_identifier:ToolIdentifier): ) - ################### langchain tools ####################### @lazy_tool_load_decorator(SceneType.COMMON,"python_repl") @@ -122,10 +120,16 @@ def _loadd_python_repl_tool(tool_identifier:ToolIdentifier): from langchain_core.tools import Tool from langchain_experimental.utilities import PythonREPL python_repl = PythonREPL() + + def _run(command: str, timeout = None) -> str: + res = python_repl.run(command=command,timeout=timeout) + if not res: + raise ValueError(f"The output is empty, please call this tool again and refine you code, use `print` function to output the value you want to obtain.") + return res repl_tool = Tool( name="python_repl", - description="A Python shell. Use this to execute python commands. 
Input should be a valid python command. If you want to see the output of a value, you SHOULD print it out with `print(...)`.", - func=python_repl.run + description="This tool is for arbitrary python code execution, typically scenes include scientific problems, such as math problems, physics problems, etc. Use this to execute python code. Input should be a valid python code. If you want to see the output of a value, you must print it out with `print(...)` statement.", + func=_run ) ToolManager.register_lc_tool( scene=tool_identifier.scene, diff --git a/source/lambda/online/common_logic/langchain_integration/tools/common_tools/rag.py b/source/lambda/online/common_logic/langchain_integration/tools/common_tools/rag.py index 1e727de1e..e318574ea 100644 --- a/source/lambda/online/common_logic/langchain_integration/tools/common_tools/rag.py +++ b/source/lambda/online/common_logic/langchain_integration/tools/common_tools/rag.py @@ -5,6 +5,8 @@ ) from common_logic.common_utils.lambda_invoke_utils import send_trace from common_logic.langchain_integration.retrievers.retriever import lambda_handler as retrieve_fn +from common_logic.langchain_integration.chains import LLMChain +import threading def rag_tool(retriever_config:dict,query=None): state = StateContext.get_current_state() @@ -14,15 +16,7 @@ def rag_tool(retriever_config:dict,query=None): context_list.extend(state['qq_match_results']) figure_list = [] retriever_params = retriever_config - # retriever_params = state["chatbot_config"]["private_knowledge_config"] retriever_params["query"] = query or state[retriever_config.get("query_key","query")] - # retriever_params["query"] = query - # output: str = invoke_lambda( - # event_body=retriever_params, - # lambda_name="Online_Functions", - # lambda_module_path="functions.functions_utils.retriever.retriever", - # handler_name="lambda_handler", - # ) output = retrieve_fn(retriever_params) for doc in output["result"]["docs"]: @@ -34,7 +28,7 @@ def 
rag_tool(retriever_config:dict,query=None): unique_figure_list = [dict(t) for t in unique_set] state['extra_response']['figures'] = unique_figure_list - send_trace(f"\n\n**rag-contexts:** {context_list}", enable_trace=state["enable_trace"]) + send_trace(f"\n\n**rag-contexts:**\n\n {context_list}", enable_trace=state["enable_trace"]) group_name = state['chatbot_config']['group_name'] llm_config = state["chatbot_config"]["private_knowledge_config"]['llm_config'] @@ -47,23 +41,21 @@ def rag_tool(retriever_config:dict,query=None): chatbot_id=chatbot_id ) - output: str = invoke_lambda( - lambda_name="Online_LLM_Generate", - lambda_module_path="lambda_llm_generate.llm_generate", - handler_name="lambda_handler", - event_body={ - "llm_config": { + llm_config = { **prompt_templates_from_ddb, **llm_config, "stream": state["stream"], "intent_type": task_type, - }, - "llm_input": { + } + + llm_input = { "contexts": context_list, "query": state["query"], - "chat_history": state["chat_history"], - }, - }, + "chat_history": state["chat_history"] + } + chain = LLMChain.get_chain( + **llm_config ) + output = chain.invoke(llm_input) return output diff --git a/source/lambda/online/lambda_main/main.py b/source/lambda/online/lambda_main/main.py index aca2226e6..ba555bd4b 100644 --- a/source/lambda/online/lambda_main/main.py +++ b/source/lambda/online/lambda_main/main.py @@ -2,6 +2,7 @@ import traceback import uuid from datetime import datetime, timezone +import traceback import boto3 from botocore.exceptions import ClientError @@ -16,6 +17,7 @@ from lambda_main.main_utils.online_entries import get_entry from common_logic.common_utils.response_utils import process_response + logger = get_logger("main") sessions_table_name = os.environ.get("SESSIONS_TABLE_NAME", "") @@ -368,5 +370,5 @@ def lambda_handler(event_body: dict, context: dict): except Exception as e: error_response = {"answer": str(e), "extra_response": {}} process_response(event_body, error_response) - logger.error(f"An error 
occurred: {str(e)}") + logger.error(f"{traceback.format_exc()}\nAn error occurred: {str(e)}") return {"error": str(e)} diff --git a/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py b/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py index c9236951b..8512a6443 100644 --- a/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py +++ b/source/lambda/online/lambda_main/main_utils/online_entries/common_entry.py @@ -1,7 +1,6 @@ -import json import traceback +import json from typing import Annotated, Any, TypedDict, List,Union -import copy from common_logic.common_utils.chatbot_utils import ChatbotManager from common_logic.common_utils.constant import ( @@ -21,7 +20,6 @@ from common_logic.common_utils.prompt_utils import get_prompt_templates_from_ddb from common_logic.common_utils.python_utils import add_messages, update_nest_dict from common_logic.common_utils.response_utils import process_response -from common_logic.common_utils.serialization_utils import JSONEncoder from common_logic.langchain_integration.tools import ToolManager from langchain_core.tools import BaseTool from langchain_core.messages.tool import ToolCall @@ -29,7 +27,6 @@ from common_logic.langchain_integration.chains import LLMChain from lambda_main.main_utils.parse_config import CommonConfigParser from langgraph.graph import END, StateGraph -from common_logic.langchain_integration.langgraph_integration import set_currrent_app from common_logic.langchain_integration.retrievers.retriever import lambda_handler as retrieve_fn from common_logic.common_utils.monitor_utils import ( format_preprocess_output, @@ -37,6 +34,9 @@ format_intention_output ) from lambda_intention_detection.intention import get_intention_results +from common_logic.langchain_integration.chains import LLMChain +from common_logic.common_utils.serialization_utils import JSONEncoder + logger = get_logger("common_entry") @@ -308,17 +308,13 @@ def agent(state: 
ChatbotState): **llm_config ) - - # print(state['chat_history'] + state['agent_tool_history']) agent_message:AIMessage = tool_calling_chain.invoke({ "query":state['query'], "chat_history":state['chat_history'], "agent_tool_history":state['agent_tool_history'] }) - send_trace( - # f"\n\n**agent_current_output:** \n{agent_message}\n\n **agent_current_call_number:** {agent_current_call_number}", f"\n\n**agent_current_output:** \n{agent_message}\n\n", state["stream"], state["ws_connection_id"] @@ -340,23 +336,23 @@ def llm_direct_results_generation(state: ChatbotState): ) logger.info(prompt_templates_from_ddb) - answer: dict = invoke_lambda( - event_body={ - "llm_config": { + llm_config = { **llm_config, "stream": state["stream"], "intent_type": task_type, **prompt_templates_from_ddb, - }, - "llm_input": { + } + + llm_input = { "query": state["query"], "chat_history": state["chat_history"], - }, - }, - lambda_name="Online_LLM_Generate", - lambda_module_path="lambda_llm_generate.llm_generate", - handler_name="lambda_handler", + } + + chain = LLMChain.get_chain( + **llm_config ) + answer = chain.invoke(llm_input) + return {"answer": answer} @@ -371,7 +367,6 @@ def tool_execution(state): """ tools:List[BaseTool] = state['tools'] - def handle_tool_errors(e): content = TOOL_CALL_ERROR_TEMPLATE.format(error=repr(e)) logger.error(f"Tool execution error:\n{traceback.format_exc()}") @@ -383,48 +378,12 @@ def handle_tool_errors(e): ) last_agent_message:AIMessage = state["agent_tool_history"][-1] - # print(last_agent_message) - # pass state to tools if needed - # tools_map = {tool.name:tool for tool in tools} tool_calls = last_agent_message.tool_calls - # tool_calls:List[ToolCall] = copy.deepcopy(last_agent_message.tool_calls) - - # for tool_call in tool_calls: - # tool = tools_map[tool_call['name']] - # if tool.pass_state: - # tool_call['args'].update({tool.pass_state_name:state}) tool_messages:List[ToolMessage] = tool_node.invoke( [AIMessage(content="",tool_calls=tool_calls)] ) 
- print("tool result",tool_messages[0].content) - - # tool_calls = state['function_calling_parsed_tool_calls'] - # assert len(tool_calls) == 1, tool_calls - # tool_call_results = [] - # for tool_call in tool_calls: - # tool_name = tool_call["name"] - # tool_kwargs = tool_call['kwargs'] - # # call tool - # output = invoke_lambda( - # event_body = { - # "tool_name":tool_name, - # "state":state, - # "kwargs":tool_kwargs - # }, - # lambda_name="Online_Tool_Execute", - # lambda_module_path="functions.lambda_tool", - # handler_name="lambda_handler" - # ) - # tool_call_results.append({ - # "name": tool_name, - # "output": output, - # "kwargs": tool_call['kwargs'], - # "model_id": tool_call['model_id'] - # }) - - # output = format_tool_call_results(tool_call['model_id'],tool_call_results) send_trace(f'**tool_execute_res:** \n{tool_messages}', enable_trace=state["enable_trace"]) return { "agent_tool_history": tool_messages, @@ -550,40 +509,20 @@ def build_graph(chatbot_state_cls): ##################################### # define online sub-graph for agent # ##################################### -# app_agent = None app = None -# def register_rag_tool( -# name: str, -# description: str, -# scene=SceneType.COMMON, -# lambda_name: str = "lambda_common_tools", -# ): -# tool_manager.register_tool( -# { -# "name": name, -# "scene": scene, -# "lambda_name": lambda_name, -# "lambda_module_path": rag.lambda_handler, -# "tool_def": { -# "name": name, -# "description": description, -# }, -# "running_mode": ToolRuningMode.ONCE, -# } -# ) - def register_rag_tool_from_config(event_body: dict): group_name = event_body.get("chatbot_config").get("group_name", "Admin") chatbot_id = event_body.get("chatbot_config").get("chatbot_id", "admin") chatbot_manager = ChatbotManager.from_environ() chatbot = chatbot_manager.get_chatbot(group_name, chatbot_id) - logger.info(chatbot) + logger.info(f"chatbot info: {chatbot}") registered_tool_names = [] for index_type, item_dict in chatbot.index_ids.items(): 
if index_type != IndexType.INTENTION: for index_content in item_dict["value"].values(): + if "indexId" in index_content and "description" in index_content: # Find retriever contain index_id retrievers = event_body["chatbot_config"]["private_knowledge_config"]['retrievers'] @@ -592,26 +531,54 @@ def register_rag_tool_from_config(event_body: dict): if retriever["index_name"] == index_content["indexId"]: break assert retriever is not None,retrievers - reranks = event_body["chatbot_config"]["private_knowledge_config"]['reranks'] - index_name = index_content["indexId"] + rerankers = event_body["chatbot_config"]["private_knowledge_config"]['rerankers'] + if rerankers: + rerankers = [rerankers[0]] + index_name = index_content["indexId"].replace("-","_") # TODO give specific retriever config ToolManager.register_common_rag_tool( retriever_config={ "retrievers":[retriever], - "reranks":[reranks[0]], + "rerankers":rerankers, "llm_config": event_body["chatbot_config"]["private_knowledge_config"]['llm_config'] }, - # event_body["chatbot_config"]["private_knowledge_config"], name=index_name, scene=SceneType.COMMON, - description=index_content["description"], - # pass_state=True, - # pass_state_name='state' + description=index_content["description"] ) registered_tool_names.append(index_name) return registered_tool_names +def register_custom_lambda_tools_from_config(event_body): + agent_config_tools = event_body['chatbot_config']['agent_config']['tools'] + new_agent_config_tools = [] + for tool in agent_config_tools: + if isinstance(tool,str): + new_agent_config_tools.append(tool) + elif isinstance(tool, dict): + tool_name = tool['name'] + assert tool_name not in new_agent_config_tools, f"repeat tool: {tool_name}\n{agent_config_tools}" + if "lambda_name" in tool: + ToolManager.register_aws_lambda_as_tool( + lambda_name=tool["lambda_name"], + tool_def={ + "description":tool["description"], + "properties":tool['properties'], + "required":tool.get('required',[]) + }, + 
name=tool_name, + scene=SceneType.COMMON, + return_direct=tool.get("return_direct",False) + ) + new_agent_config_tools.append(tool_name) + else: + raise ValueError(f"tool type {type(tool)}: {tool} is not supported") + + event_body['chatbot_config']['agent_config']['tools'] = new_agent_config_tools + return new_agent_config_tools + + def common_entry(event_body): """ Entry point for the Lambda function. @@ -622,17 +589,11 @@ def common_entry(event_body): if app is None: app = build_graph(ChatbotState) - # if app_agent is None: - # app_agent = build_agent_graph(ChatbotState) - # debuging if is_running_local(): with open("common_entry_workflow.png", "wb") as f: f.write(app.get_graph().draw_mermaid_png()) - # with open("common_entry_agent_workflow.png", "wb") as f: - # f.write(app_agent.get_graph().draw_mermaid_png()) - ################################################################################ # prepare inputs and invoke graph event_body["chatbot_config"] = CommonConfigParser.from_chatbot_config( @@ -650,22 +611,28 @@ def common_entry(event_body): agent_config = event_body["chatbot_config"]["agent_config"] # register as rag tool for each aos index + # print('private_knowledge_config',event_body["chatbot_config"]["private_knowledge_config"]) registered_tool_names = register_rag_tool_from_config(event_body) # update private knowledge tool to agent config for registered_tool_name in registered_tool_names: if registered_tool_name not in agent_config['tools']: agent_config['tools'].append(registered_tool_name) - # define all knowledge rag tool - print('private_knowledge_config',event_body["chatbot_config"]["private_knowledge_config"]) + + + # register lambda tools + register_custom_lambda_tools_from_config(event_body) + # + logger.info(f'event body to graph:\n{json.dumps(event_body,ensure_ascii=False,cls=JSONEncoder)}') + + + # define all knowledge rag tool all_knowledge_rag_tool = ToolManager.register_common_rag_tool( 
retriever_config=event_body["chatbot_config"]["private_knowledge_config"], name="all_knowledge_rag_tool", scene=SceneType.COMMON, description="all knowledge rag tool", - # pass_state=True, - # pass_state_name='state' ) # invoke graph and get results @@ -687,12 +654,11 @@ def common_entry(event_body): "last_tool_messages":None, "all_knowledge_rag_tool":all_knowledge_rag_tool, "tools":None, - # "agent_repeated_call_limit": chatbot_config["agent_repeated_call_limit"], - # "agent_current_call_number": 0, "ddb_additional_kwargs": {} }, config={"recursion_limit": 10} ) + # print('extra_response',response['extra_response']) return response["app_response"] diff --git a/source/lambda/online/lambda_main/test/main_local_test_common.py b/source/lambda/online/lambda_main/test/main_local_test_common.py index 67546ca1c..f850bde54 100644 --- a/source/lambda/online/lambda_main/test/main_local_test_common.py +++ b/source/lambda/online/lambda_main/test/main_local_test_common.py @@ -139,38 +139,66 @@ def test_multi_turns_agent_pr(): }, ] - # default_index_names = { - # "intention":["pr_test-intention-default"], - # "qq_match": [], - # "private_knowledge": ['pr_test-qd-sso_poc'] - # } + default_index_names = { + "intention":[], + "qq_match": [], + "private_knowledge": [] + } # user_queries = [{ # "query": "今天天气怎么样", # "use_history": True, # "enable_trace": False # }] + # user_queries = [{ + # # "query": "199乘以98等于多少", + # "query": "1234乘以89878等于多少?", + # "use_history": True, + # "enable_trace": True + # }] + # user_queries = [{ + # "query": "199乘以98等于多少", + # # "query": "介绍一下MemGPT", + # "use_history": True, + # "enable_trace": True + # }] user_queries = [{ - # "query": "199乘以98等于多少", - "query": "1234乘以89878等于多少?", + "query": "”我爱北京天安门“包含多少个字符?", + # "query": "介绍一下MemGPT", "use_history": True, "enable_trace": True }] - default_index_names = { - "intention":[], - "qq_match": [], - "private_knowledge": [] - } + # default_index_names = { + # "intention":[], + # "qq_match": [], + # 
"private_knowledge": [] + # } default_llm_config = { - # 'model_id': 'anthropic.claude-3-sonnet-20240229-v1:0', + 'model_id': 'anthropic.claude-3-sonnet-20240229-v1:0', # 'model_id': "meta.llama3-1-70b-instruct-v1:0", # 'model_id':"mistral.mistral-large-2407-v1:0", - 'model_id':"cohere.command-r-plus-v1:0", + # 'model_id':"cohere.command-r-plus-v1:0", 'model_kwargs': { 'temperature': 0.1, 'max_tokens': 4096 } } + # agent_config={"tools":["python_repl"]} + agent_config={ + "tools":[{ + "lambda_name":"intelli-agent-lambda-tool-example1", + "name": "count_char", + "description": "Count the number of chars contained in a sentence.", + "properties": { + "phrase": { + "type": "string", + "description": "The phrase needs to count chars" + } + }, + "required": ["phrase"], + "return_direct":False + }] + } for query in user_queries: print("==" * 50) @@ -181,11 +209,11 @@ def test_multi_turns_agent_pr(): query=query['query'], use_history=query['use_history'], chatbot_id="admin", - group_name='admin', + group_name='Admin', only_use_rag_tool=False, default_index_names=default_index_names, enable_trace = query.get('enable_trace',True), - agent_config={"tools":["python_repl"]}, + agent_config=agent_config, default_llm_config=default_llm_config ) print() @@ -228,11 +256,9 @@ def complete_test_pr(): print("start test in agent mode") test_multi_turns_agent_pr() print("finish test in agent mode") - print("start test in rag mode") test_multi_turns_rag_pr() print("finish test in rag mode") - print("start test in chat mode") test_multi_turns_chat_pr() # print(srg) From 118a2da58e61956e976e6d03d77d24035dfd7400 Mon Sep 17 00:00:00 2001 From: NingLyu Date: Wed, 6 Nov 2024 09:12:09 +0000 Subject: [PATCH 064/110] ffix: use retrieval content --- .../common_logic/common_utils/monitor_utils.py | 14 ++++++-------- .../retriever/utils/context_utils.py | 5 ++++- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/source/lambda/online/common_logic/common_utils/monitor_utils.py 
b/source/lambda/online/common_logic/common_utils/monitor_utils.py index 5d5974ab5..25088c104 100644 --- a/source/lambda/online/common_logic/common_utils/monitor_utils.py +++ b/source/lambda/online/common_logic/common_utils/monitor_utils.py @@ -13,10 +13,8 @@ def is_null_or_empty(value): return False -def _generate_markdown_link(file_path: str) -> str: - file_name = file_path.split("/")[-1] - markdown_link = f"[{file_name}]({file_path})" - return markdown_link +def _file_name_in_path(file_path: str) -> str: + return file_path.split("/")[-1] def format_qq_data(data) -> str: @@ -37,7 +35,7 @@ def format_qq_data(data) -> str: markdown_table += "|-----|-----|-----|-----|\n" # Data contains only one QQ match result - qq_source = _generate_markdown_link(data.get("source", "")) + qq_source = _file_name_in_path(data.get("source", "")) qq_score = data.get("score", -1) qq_question = data.get("page_content", "").replace("\n", "
") qq_answer = data.get("answer", "").replace("\n", "
") @@ -64,9 +62,9 @@ def format_rag_data(data, qq_result) -> str: markdown_table += "|-----|-----|-----|-----|\n" for item in data: raw_source = item.get("source", "") - source = _generate_markdown_link(raw_source) + source = _file_name_in_path(raw_source) score = item.get("score", -1) - page_content = item.get("page_content", "").replace("\n", "
") + page_content = item.get("retrieval_content", "").replace("\n", "
") markdown_table += f"| {source} | {raw_source} | {score} | {page_content} |\n" if not is_null_or_empty(qq_result): @@ -76,7 +74,7 @@ def format_rag_data(data, qq_result) -> str: for qq_item in qq_result: raw_qq_source = qq_item.get("source", "") - qq_source = _generate_markdown_link(raw_qq_source) + qq_source = _file_name_in_path(raw_qq_source) qq_score = qq_item.get("score", -1) qq_question = qq_item.get("page_content", "").replace("\n", "
") qq_answer = qq_item.get("answer", "").replace("\n", "
") diff --git a/source/lambda/online/functions/functions_utils/retriever/utils/context_utils.py b/source/lambda/online/functions/functions_utils/retriever/utils/context_utils.py index cada844c0..0b228a475 100644 --- a/source/lambda/online/functions/functions_utils/retriever/utils/context_utils.py +++ b/source/lambda/online/functions/functions_utils/retriever/utils/context_utils.py @@ -24,7 +24,8 @@ def contexts_trunc(docs: list[dict], context_num=2): context_strs.append(content) s.add(content) context_docs.append( - {"doc": content, "source": doc["source"], "score": doc["score"]} + {"doc": content, + "source": doc["source"], "score": doc["score"]} ) context_sources.append(doc["source"]) return { @@ -53,8 +54,10 @@ def retriever_results_format( "answer": doc.metadata.get("answer", ""), "question": doc.metadata.get("question", ""), "figure": doc.metadata.get("figure", []), + "retrieval_content": doc.metadata.get("retrieval_content", ""), } ) + if print_source: source_strs = [] for doc_dict in doc_dicts: From 9d38ee4ab20d840dc17bfc01d0b1a3558785fc83 Mon Sep 17 00:00:00 2001 From: Xu Han Date: Wed, 6 Nov 2024 09:15:26 +0000 Subject: [PATCH 065/110] chore: remove unused code --- source/lambda/etl/create_chatbot.py | 103 ---------------------------- source/lambda/etl/get_status.py | 47 ------------- source/lambda/etl/list_chatbot.py | 55 --------------- 3 files changed, 205 deletions(-) delete mode 100644 source/lambda/etl/create_chatbot.py delete mode 100644 source/lambda/etl/get_status.py delete mode 100644 source/lambda/etl/list_chatbot.py diff --git a/source/lambda/etl/create_chatbot.py b/source/lambda/etl/create_chatbot.py deleted file mode 100644 index 615138222..000000000 --- a/source/lambda/etl/create_chatbot.py +++ /dev/null @@ -1,103 +0,0 @@ -import json -import logging -import os -from datetime import datetime, timezone - -import boto3 -from constant import IndexType -from utils.ddb_utils import ( - initiate_chatbot, - initiate_index, - initiate_model, - 
is_chatbot_existed, -) - -logger = logging.getLogger() -logger.setLevel(logging.INFO) -region_name = os.environ.get("AWS_REGION") -embedding_endpoint = os.environ.get("EMBEDDING_ENDPOINT") -dynamodb = boto3.resource("dynamodb", region_name=region_name) -index_table = dynamodb.Table(os.environ.get("INDEX_TABLE_NAME")) -chatbot_table = dynamodb.Table(os.environ.get("CHATBOT_TABLE_NAME")) -model_table = dynamodb.Table(os.environ.get("MODEL_TABLE_NAME")) - - -def lambda_handler(event, context): - logger.info(f"event:{event}") - resp_header = { - "Content-Type": "application/json", - "Access-Control-Allow-Headers": "Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token", - "Access-Control-Allow-Origin": "*", - "Access-Control-Allow-Methods": "*", - } - input_body = json.loads(event["body"]) - if "groupName" not in input_body: - return { - "statusCode": 400, - "headers": resp_header, - "body": json.dumps( - { - "message": "No groupName in the body, please specify a groupName, e.g. 
Admin" - } - ), - } - - group_name = input_body["groupName"] - chatbot_id = group_name.lower() - if is_chatbot_existed(chatbot_table, group_name, chatbot_id): - return { - "statusCode": 200, - "headers": resp_header, - "body": json.dumps( - { - "chatbotId": chatbot_id, - "groupName": group_name, - "message": "Chatbot existed", - } - ), - } - - model_id = f"{chatbot_id}-embedding" - create_time = str(datetime.now(timezone.utc)) - initiate_model(model_table, group_name, model_id, - embedding_endpoint, create_time) - - index_id_list = {} - DESCRIPTION = "Answer question based on search result" - # Iterate over all enum members and create DDB metadata - for member in IndexType.__members__.values(): - index_type = member.value - index_id = tag = f"{chatbot_id}-{index_type}-default" - index_id_list[index_type] = index_id - initiate_index( - index_table, - group_name, - index_id, - model_id, - index_type, - tag, - create_time, - DESCRIPTION, - ) - initiate_chatbot( - chatbot_table, - group_name, - chatbot_id, - index_id, - index_type, - tag, - create_time, - ) - - return { - "statusCode": 200, - "headers": resp_header, - "body": json.dumps( - { - "chatbotId": chatbot_id, - "groupName": group_name, - "indexIds": index_id_list, - "message": "Chatbot created", - } - ), - } diff --git a/source/lambda/etl/get_status.py b/source/lambda/etl/get_status.py deleted file mode 100644 index 8682a1e34..000000000 --- a/source/lambda/etl/get_status.py +++ /dev/null @@ -1,47 +0,0 @@ -import json -import logging -import os - -import boto3 - -logger = logging.getLogger() -logger.setLevel(logging.INFO) -state_machine_arn = os.environ["SFN_ARN"] - - -def lambda_handler(event, context): - execution_id = event["queryStringParameters"]["executionId"] - sf_client = boto3.client("stepfunctions") - execution_arn = ( - state_machine_arn.replace( - "stateMachine", "execution") + ":" + execution_id - ) - - resp_header = { - "Content-Type": "application/json", - "Access-Control-Allow-Headers": 
"Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token", - "Access-Control-Allow-Origin": "*", - "Access-Control-Allow-Methods": "*", - } - - try: - response = sf_client.describe_execution(executionArn=execution_arn) - - execution_status = response["status"] - logger.info("Execution Status: %s", execution_status) - - return { - "statusCode": 200, - "headers": resp_header, - "body": json.dumps( - {"execution_id": execution_id, "execution_status": execution_status} - ), - } - except Exception as e: - logger.error("Error: %s", str(e)) - - return { - "statusCode": 500, - "headers": resp_header, - "body": json.dumps(f"Error: {str(e)}"), - } diff --git a/source/lambda/etl/list_chatbot.py b/source/lambda/etl/list_chatbot.py deleted file mode 100644 index b0a5708e0..000000000 --- a/source/lambda/etl/list_chatbot.py +++ /dev/null @@ -1,55 +0,0 @@ -import json -import logging -import os - -import boto3 - -cognito = boto3.client("cognito-idp") - -cognito_user_pool_id = os.environ.get("USER_POOL_ID") - -logger = logging.getLogger() -logger.setLevel(logging.INFO) - - -def lambda_handler(event, context): - - authorizer_type = ( - event["requestContext"].get("authorizer", {}).get("authorizerType") - ) - if authorizer_type == "lambda_authorizer": - claims = json.loads(event["requestContext"]["authorizer"]["claims"]) - cognito_groups = claims["cognito:groups"] - cognito_groups_list = cognito_groups.split(",") - else: - cognito_groups_list = ["Admin"] - - output = {} - - if "Admin" in cognito_groups_list: - # Return a list of all cognito groups - response = cognito.list_groups(UserPoolId=cognito_user_pool_id) - output["chatbot_ids"] = [group["GroupName"] for group in response["Groups"]] - else: - output["chatbot_ids"] = cognito_groups_list - resp_header = { - "Content-Type": "application/json", - "Access-Control-Allow-Headers": "Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token", - "Access-Control-Allow-Origin": "*", - 
"Access-Control-Allow-Methods": "*", - } - - try: - return { - "statusCode": 200, - "headers": resp_header, - "body": json.dumps(output), - } - except Exception as e: - logger.error("Error: %s", str(e)) - - return { - "statusCode": 500, - "headers": resp_header, - "body": json.dumps(f"Error: {str(e)}"), - } From 7e1ddcf69d8fd0f787b99a3be2a0e33a175548eb Mon Sep 17 00:00:00 2001 From: Xu Han Date: Wed, 6 Nov 2024 11:47:50 +0000 Subject: [PATCH 066/110] feat: merge execution logic into one lambda --- source/infrastructure/lib/api/api-stack.ts | 33 +-- source/lambda/etl/delete_execution.py | 139 ----------- source/lambda/etl/execution_management.py | 265 +++++++++++++++++++++ source/lambda/etl/get_execution.py | 49 ---- source/lambda/etl/list_execution.py | 110 --------- 5 files changed, 273 insertions(+), 323 deletions(-) delete mode 100644 source/lambda/etl/delete_execution.py create mode 100644 source/lambda/etl/execution_management.py delete mode 100644 source/lambda/etl/get_execution.py delete mode 100644 source/lambda/etl/list_execution.py diff --git a/source/infrastructure/lib/api/api-stack.ts b/source/infrastructure/lib/api/api-stack.ts index 254f608b4..7d506606c 100644 --- a/source/infrastructure/lib/api/api-stack.ts +++ b/source/infrastructure/lib/api/api-stack.ts @@ -176,36 +176,19 @@ export class ApiConstruct extends Construct { ], }); - const listExecutionLambda = new LambdaFunction(this, "ListExecution", { + const executionManagementLambda = new LambdaFunction(this, "ExecutionManagementLambda", { code: Code.fromAsset(join(__dirname, "../../../lambda/etl")), - handler: "list_execution.lambda_handler", + handler: "execution_management.lambda_handler", environment: { EXECUTION_TABLE: executionTableName, - }, - statements: [this.iamHelper.dynamodbStatement], - }); - - const getExecutionLambda = new LambdaFunction(this, "GetExecution", { - code: Code.fromAsset(join(__dirname, "../../../lambda/etl")), - handler: "get_execution.lambda_handler", - environment: { 
ETL_OBJECT_TABLE: etlObjTableName, ETL_OBJECT_INDEX: etlObjIndexName, - }, - statements: [this.iamHelper.dynamodbStatement], - }); - - const delExecutionLambda = new LambdaFunction(this, "DeleteExecution", { - code: Code.fromAsset(join(__dirname, "../../../lambda/etl")), - handler: "delete_execution.lambda_handler", - environment: { SFN_ARN: props.knowledgeBaseStackOutputs.sfnOutput.stateMachineArn, - EXECUTION_TABLE: executionTableName, }, statements: [this.iamHelper.dynamodbStatement], }); - props.knowledgeBaseStackOutputs.sfnOutput.grantStartExecution(delExecutionLambda.function); + props.knowledgeBaseStackOutputs.sfnOutput.grantStartExecution(executionManagementLambda.function); const uploadDocLambda = new LambdaFunction(this, "UploadDocument", { code: Code.fromAsset(join(__dirname, "../../../lambda/etl")), @@ -278,7 +261,7 @@ export class ApiConstruct extends Construct { } apiKBExecution.addMethod( "GET", - new apigw.LambdaIntegration(listExecutionLambda.function), + new apigw.LambdaIntegration(executionManagementLambda.function), {...this.genMethodOption(api, auth, { Items: {type: JsonSchemaType.ARRAY, items: { type: JsonSchemaType.OBJECT, @@ -333,11 +316,11 @@ export class ApiConstruct extends Construct { ); apiKBExecution.addMethod( "DELETE", - new apigw.LambdaIntegration(delExecutionLambda.function), + new apigw.LambdaIntegration(executionManagementLambda.function), { ...this.genMethodOption(api, auth, { - data: { type: JsonSchemaType.ARRAY, items: { type: JsonSchemaType.STRING } }, - message: { type: JsonSchemaType.STRING } + ExecutionIds: { type: JsonSchemaType.ARRAY, items: { type: JsonSchemaType.STRING } }, + Message: { type: JsonSchemaType.STRING } }), requestModels: this.genRequestModel(api, { "executionId": { "type": JsonSchemaType.ARRAY, "items": { "type": JsonSchemaType.STRING } }, @@ -348,7 +331,7 @@ export class ApiConstruct extends Construct { const apiGetExecutionById = apiKBExecution.addResource("{executionId}"); 
apiGetExecutionById.addMethod( "GET", - new apigw.LambdaIntegration(getExecutionLambda.function), + new apigw.LambdaIntegration(executionManagementLambda.function), { ...this.genMethodOption(api, auth, { Items: { diff --git a/source/lambda/etl/delete_execution.py b/source/lambda/etl/delete_execution.py deleted file mode 100644 index 8ead1bde4..000000000 --- a/source/lambda/etl/delete_execution.py +++ /dev/null @@ -1,139 +0,0 @@ -""" -Lambda function for deleting execution pipelines and associated documents. - -This module handles the deletion of execution pipelines and their corresponding -documents from OpenSearch. It interacts with DynamoDB and Step Functions to -manage the deletion process. -""" - -import json -import logging -import os - -import boto3 -from constant import ExecutionStatus, OperationType, UiStatus - -# Set up logging -logger = logging.getLogger() -logger.setLevel(logging.INFO) - -# Initialize AWS clients -sfn_client = boto3.client("stepfunctions") -dynamodb = boto3.resource("dynamodb") - -# Get environment variables -sfn_arn = os.environ.get("SFN_ARN") -table_name = os.environ.get("EXECUTION_TABLE") -table = dynamodb.Table(table_name) - - -def get_execution_item(execution_id): - """ - Retrieve an execution item from DynamoDB. - - Args: - execution_id (str): The ID of the execution to retrieve. - - Returns: - dict: The execution item if found, None otherwise. - """ - execution_item = table.get_item(Key={"executionId": execution_id}) - if "Item" not in execution_item: - return None - return execution_item["Item"] - - -def update_execution_item(execution_id, execution_status, ui_status): - """ - Update the status of an execution item in DynamoDB. - - Args: - execution_id (str): The ID of the execution to update. - execution_status (str): The new execution status. - ui_status (str): The new UI status. - - Returns: - dict: The response from the DynamoDB update operation. 
- """ - response = table.update_item( - Key={"executionId": execution_id}, - UpdateExpression="SET executionStatus = :execution_status, uiStatus = :ui_status", - ExpressionAttributeValues={":execution_status": execution_status, ":ui_status": ui_status}, - ReturnValues="UPDATED_NEW", - ) - return response - - -def delete_execution_pipeline(execution_id): - """ - Delete an execution pipeline and its associated document. - - Args: - execution_id (str): The ID of the execution to delete. - - Raises: - Exception: If the execution is not found. - """ - execution_item = get_execution_item(execution_id) - if not execution_item: - raise Exception(f"Execution {execution_id} not found") - - # Update execution item status - update_execution_item(execution_id, ExecutionStatus.DELETING.value, UiStatus.ACTIVE.value) - - # Prepare input for Step Function to delete document from OpenSearch - delete_document_sfn_input = { - "s3Bucket": execution_item["s3Bucket"], - "s3Prefix": execution_item["s3Prefix"], - "chatbotId": execution_item["chatbotId"], - "indexType": execution_item["indexType"], - "operationType": OperationType.DELETE.value, - "indexId": execution_item["indexId"], - "groupName": execution_item["groupName"], - "tableItemId": execution_item["executionId"], - "embeddingModelType": execution_item["embeddingModelType"], - "offline": "true", - } - sfn_client.start_execution(stateMachineArn=sfn_arn, input=json.dumps(delete_document_sfn_input)) - - -def lambda_handler(event, context): - """ - AWS Lambda function handler for deleting execution pipelines. - - Args: - event (dict): The event data passed to the Lambda function. - context (object): The runtime information of the Lambda function. - - Returns: - dict: A response object containing the status code, headers, and body. 
- """ - logger.info(event) - input_body = json.loads(event["body"]) - resp_header = { - "Content-Type": "application/json", - "Access-Control-Allow-Headers": "Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token", - "Access-Control-Allow-Origin": "*", - "Access-Control-Allow-Methods": "*", - } - - try: - # Delete each execution pipeline specified in the input - for execution_id in input_body["executionId"]: - delete_execution_pipeline(execution_id) - - # Prepare success response - output = {"message": "The deletion of specified documents has started", "data": input_body["executionId"]} - return { - "statusCode": 200, - "headers": resp_header, - "body": json.dumps(output), - } - except Exception as e: - # Log and return error response - logger.error("Error: %s", str(e)) - return { - "statusCode": 500, - "headers": resp_header, - "body": json.dumps(f"Error: {str(e)}"), - } diff --git a/source/lambda/etl/execution_management.py b/source/lambda/etl/execution_management.py new file mode 100644 index 000000000..71def2e85 --- /dev/null +++ b/source/lambda/etl/execution_management.py @@ -0,0 +1,265 @@ +""" +Lambda function for managing execution pipelines and associated documents. +Provides REST API endpoints for CRUD operations on execution pipelines, +handling document management in DynamoDB and OpenSearch. 
+""" + +import json +import logging +import os +from dataclasses import dataclass +from typing import Any, Dict, Iterator, List, Optional + +import boto3 +from boto3.dynamodb.conditions import Key +from botocore.paginate import TokenEncoder +from botocore.paginator import Paginator +from constant import ExecutionStatus, OperationType, UiStatus + +# Configure logging +logger = logging.getLogger() +logger.setLevel(logging.INFO) + + +@dataclass +class AwsResources: + """Centralized AWS resource management""" + + sfn_client = boto3.client("stepfunctions") + dynamodb = boto3.resource("dynamodb") + dynamodb_client = boto3.client("dynamodb") + + def __post_init__(self): + # Initialize DynamoDB tables + self.execution_table = self.dynamodb.Table(Config.EXECUTION_TABLE_NAME) + self.object_table = self.dynamodb.Table(Config.ETL_OBJECT_TABLE_NAME) + + +class Config: + """Configuration constants""" + + SFN_ARN = os.environ["SFN_ARN"] + EXECUTION_TABLE_NAME = os.environ["EXECUTION_TABLE"] + ETL_OBJECT_TABLE_NAME = os.environ["ETL_OBJECT_TABLE"] + ETL_OBJECT_INDEX = os.environ["ETL_OBJECT_INDEX"] + DEFAULT_PAGE_SIZE = 50 + DEFAULT_MAX_ITEMS = 50 + + CORS_HEADERS = { + "Content-Type": "application/json", + "Access-Control-Allow-Headers": "Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token", + "Access-Control-Allow-Origin": "*", + "Access-Control-Allow-Methods": "*", + } + + +# Initialize AWS resources +aws_resources = AwsResources() +token_encoder = TokenEncoder() + + +class PaginationConfig: + + @staticmethod + def get_query_parameter(event: Dict[str, Any], parameter_name: str, default_value: Any = None) -> Any: + """Extract query parameter from event with default value""" + if event.get("queryStringParameters") and parameter_name in event["queryStringParameters"]: + return event["queryStringParameters"][parameter_name] + return default_value + + @classmethod + def get_pagination_config(cls, event: Dict[str, Any]) -> Dict[str, Any]: + """Build pagination 
configuration from event parameters""" + return { + "MaxItems": int(cls.get_query_parameter(event, "max_items", Config.DEFAULT_MAX_ITEMS)), + "PageSize": int(cls.get_query_parameter(event, "page_size", Config.DEFAULT_PAGE_SIZE)), + "StartingToken": cls.get_query_parameter(event, "starting_token"), + } + + +class AuthorizationHelper: + @staticmethod + def get_cognito_groups(event: Dict[str, Any]) -> List[str]: + """Extract and validate Cognito groups from event authorizer""" + authorizer = event["requestContext"].get("authorizer", {}) + authorizer_type = authorizer.get("authorizerType") + + if authorizer_type != "lambda_authorizer": + logger.error("Invalid authorizer type") + raise ValueError("Invalid authorizer type") + + claims = json.loads(authorizer["claims"]) + + if "use_api_key" in claims: + return [claims.get("GroupName", "Admin")] + + return claims["cognito:groups"].split(",") + + +class ExecutionManager: + """Handles execution-related database operations""" + + @staticmethod + def get_execution(execution_id: str) -> Optional[Dict]: + """Retrieve execution details from DynamoDB""" + response = aws_resources.execution_table.get_item(Key={"executionId": execution_id}) + return response.get("Item") + + @staticmethod + def update_execution_status(execution_id: str, execution_status: str, ui_status: str) -> Dict: + """Update execution status in DynamoDB""" + return aws_resources.execution_table.update_item( + Key={"executionId": execution_id}, + UpdateExpression="SET executionStatus = :execution_status, uiStatus = :ui_status", + ExpressionAttributeValues={":execution_status": execution_status, ":ui_status": ui_status}, + ReturnValues="UPDATED_NEW", + ) + + @staticmethod + def delete_execution(execution_id: str) -> None: + """Initiate execution deletion process""" + execution = ExecutionManager.get_execution(execution_id) + if not execution: + raise ValueError(f"Execution {execution_id} not found") + + # Update status to indicate deletion in progress + 
ExecutionManager.update_execution_status(execution_id, ExecutionStatus.DELETING.value, UiStatus.ACTIVE.value) + + # Prepare deletion input for Step Function + deletion_input = { + "s3Bucket": execution["s3Bucket"], + "s3Prefix": execution["s3Prefix"], + "chatbotId": execution["chatbotId"], + "indexType": execution["indexType"], + "operationType": OperationType.DELETE.value, + "indexId": execution["indexId"], + "groupName": execution["groupName"], + "tableItemId": execution["executionId"], + "embeddingModelType": execution["embeddingModelType"], + "offline": "true", + } + + aws_resources.sfn_client.start_execution(stateMachineArn=Config.SFN_ARN, input=json.dumps(deletion_input)) + + @staticmethod + def get_filtered_executions( + paginator: Paginator, cognito_groups: List[str], pagination_config: Dict[str, Any] + ) -> Dict[str, Any]: + """Get filtered executions based on user groups""" + if "Admin" in cognito_groups: + response_iterator = paginator.paginate( + TableName=Config.EXECUTION_TABLE_NAME, + PaginationConfig=pagination_config, + FilterExpression="uiStatus = :active", + ExpressionAttributeValues={":active": {"S": "ACTIVE"}}, + ) + else: + response_iterator = paginator.paginate( + TableName=Config.EXECUTION_TABLE_NAME, + PaginationConfig=pagination_config, + FilterExpression="uiStatus = :active AND groupName = :group_id", + ExpressionAttributeValues={ + ":active": {"S": "ACTIVE"}, + ":group_id": {"S": cognito_groups[0]}, + }, + ) + + output = {} + encoder = TokenEncoder() + + for page in response_iterator: + page_items = page["Items"] + processed_items = [] + + for item in page_items: + processed_item = {key: value["S"] for key, value in item.items()} + processed_items.append(processed_item) + + output["Items"] = processed_items + output["Count"] = page["Count"] + output["Config"] = pagination_config + + if "LastEvaluatedKey" in page: + output["LastEvaluatedKey"] = encoder.encode({"ExclusiveStartKey": page["LastEvaluatedKey"]}) + + return output + + +class 
ApiResponse: + """Standardized API response handler""" + + @staticmethod + def success(data: Any, status_code: int = 200) -> Dict: + return {"statusCode": status_code, "headers": Config.CORS_HEADERS, "body": json.dumps(data)} + + @staticmethod + def error(message: str, status_code: int = 500) -> Dict: + logger.error("Error: %s", message) + return {"statusCode": status_code, "headers": Config.CORS_HEADERS, "body": json.dumps({"error": str(message)})} + + +class ApiHandler: + """API endpoint handlers""" + + def __init__(self): + self.execution_service = ExecutionService(aws_resources.dynamodb_client) + + @staticmethod + def delete_executions(event: Dict) -> Dict: + """Handle DELETE /executions endpoint""" + try: + execution_ids = json.loads(event["body"])["executionId"] + for execution_id in execution_ids: + ExecutionManager.delete_execution(execution_id) + + return ApiResponse.success({"Message": "Deletion process initiated", "ExecutionIds": execution_ids}) + except Exception as e: + return ApiResponse.error(str(e)) + + @staticmethod + def get_execution_objects(event: Dict) -> Dict: + """Handle GET /executions/{executionId}/objects endpoint""" + try: + execution_id = event["pathParameters"]["executionId"] + response = aws_resources.object_table.query( + IndexName=Config.ETL_OBJECT_INDEX, KeyConditionExpression=Key("executionId").eq(execution_id) + ) + + return ApiResponse.success({"Items": response["Items"], "Count": response["Count"]}) + except Exception as e: + return ApiResponse.error(str(e)) + + @staticmethod + def list_executions(event: Dict) -> Dict: + """Handle GET /executions endpoint""" + try: + # Get cognito groups and pagination config and paginator + cognito_groups = AuthorizationHelper.get_cognito_groups(event) + pagination_config = PaginationConfig.get_pagination_config(event) + paginator = aws_resources.dynamodb_client.get_paginator("scan") + + # Get and process executions + result = ExecutionManager.get_filtered_executions(paginator, cognito_groups, 
pagination_config) + + return ApiResponse.success(result) + except ValueError as ve: + return ApiResponse.error(str(ve), 403) + except Exception as e: + return ApiResponse.error(str(e)) + + +def lambda_handler(event: Dict, context: Any) -> Dict: + """Routes API requests to appropriate handlers based on HTTP method and path""" + logger.info("Received event: %s", json.dumps(event)) + + routes = { + ("DELETE", "/knowledge-base/executions"): ApiHandler.delete_executions, + ("GET", "/knowledge-base/executions/{executionId}"): ApiHandler.get_execution_objects, + ("GET", "/knowledge-base/executions"): ApiHandler.list_executions, + } + + handler = routes.get((event["httpMethod"], event["resource"])) + if not handler: + return ApiResponse.error("Route not found", 404) + + return handler(event) diff --git a/source/lambda/etl/get_execution.py b/source/lambda/etl/get_execution.py deleted file mode 100644 index bdf84289d..000000000 --- a/source/lambda/etl/get_execution.py +++ /dev/null @@ -1,49 +0,0 @@ -import json -import logging -import os - -import boto3 -from boto3.dynamodb.conditions import Key - -logger = logging.getLogger() -logger.setLevel(logging.INFO) -dynamodb = boto3.resource('dynamodb') -table_name = os.environ.get('ETL_OBJECT_TABLE') -index_name = os.environ.get('ETL_OBJECT_INDEX') -object_table = dynamodb.Table(table_name) - - -def lambda_handler(event, context): - # API Gateway validates parameters - resp_header = { - "Content-Type": "application/json", - "Access-Control-Allow-Headers": "Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token", - "Access-Control-Allow-Origin": "*", - "Access-Control-Allow-Methods": "*", - } - - try: - execution_id = event["pathParameters"]["executionId"] - response = object_table.query( - IndexName=index_name, - KeyConditionExpression=Key('executionId').eq(execution_id) - ) - logger.info(response) - output = { - "Items": response["Items"], - "Count": response["Count"] - } - - return { - "statusCode": 200, - 
"headers": resp_header, - "body": json.dumps(output), - } - except Exception as e: - logger.error("Error: %s", str(e)) - - return { - "statusCode": 500, - "headers": resp_header, - "body": json.dumps(f"Error: {str(e)}"), - } diff --git a/source/lambda/etl/list_execution.py b/source/lambda/etl/list_execution.py deleted file mode 100644 index d1a082f32..000000000 --- a/source/lambda/etl/list_execution.py +++ /dev/null @@ -1,110 +0,0 @@ -import json -import logging -import os - -import boto3 -from botocore.paginate import TokenEncoder - -DEFAULT_MAX_ITEMS = 50 -DEFAULT_SIZE = 50 -logger = logging.getLogger() -logger.setLevel(logging.INFO) -client = boto3.client("dynamodb") -table_name = os.environ.get("EXECUTION_TABLE") -encoder = TokenEncoder() - - -def get_query_parameter(event, parameter_name, default_value=None): - if event.get("queryStringParameters") and parameter_name in event["queryStringParameters"]: - return event["queryStringParameters"][parameter_name] - return default_value - - -def lambda_handler(event, context): - logger.info(event) - resp_header = { - "Content-Type": "application/json", - "Access-Control-Allow-Headers": "Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token", - "Access-Control-Allow-Origin": "*", - "Access-Control-Allow-Methods": "*", - } - - authorizer_type = event["requestContext"].get("authorizer", {}).get("authorizerType") - if authorizer_type == "lambda_authorizer": - claims = json.loads(event["requestContext"]["authorizer"]["claims"]) - if "use_api_key" in claims: - group_name = get_query_parameter(event, "GroupName", "Admin") - cognito_groups_list = [group_name] - else: - cognito_groups = claims["cognito:groups"] - cognito_groups_list = cognito_groups.split(",") - else: - logger.error("Invalid authorizer type") - return { - "statusCode": 403, - "headers": resp_header, - "body": json.dumps({"error": "Invalid authorizer type"}), - } - - max_items = get_query_parameter(event, "max_items", DEFAULT_MAX_ITEMS) - 
page_size = get_query_parameter(event, "page_size", DEFAULT_SIZE) - starting_token = get_query_parameter(event, "starting_token") - - config = { - "MaxItems": int(max_items), - "PageSize": int(page_size), - "StartingToken": starting_token, - } - - # Use query after adding a filter - paginator = client.get_paginator("scan") - - if "Admin" in cognito_groups_list: - response_iterator = paginator.paginate( - TableName=table_name, - PaginationConfig=config, - FilterExpression="uiStatus = :active", - ExpressionAttributeValues={":active": {"S": "ACTIVE"}}, - ) - else: - response_iterator = paginator.paginate( - TableName=table_name, - PaginationConfig=config, - FilterExpression="uiStatus = :active AND groupName = :group_id", - ExpressionAttributeValues={ - ":active": {"S": "ACTIVE"}, - ":group_id": {"S": cognito_groups_list[0]}, - }, - ) - - output = {} - for page in response_iterator: - page_items = page["Items"] - page_json = [] - for item in page_items: - item_json = {} - for key in item.keys(): - item_json[key] = item[key]["S"] - page_json.append(item_json) - # Return the latest page - output["Items"] = page_json - output["Count"] = page["Count"] - if "LastEvaluatedKey" in page: - output["LastEvaluatedKey"] = encoder.encode({"ExclusiveStartKey": page["LastEvaluatedKey"]}) - - output["Config"] = config - - try: - return { - "statusCode": 200, - "headers": resp_header, - "body": json.dumps(output), - } - except Exception as e: - logger.error("Error: %s", str(e)) - - return { - "statusCode": 500, - "headers": resp_header, - "body": json.dumps(f"Error: {str(e)}"), - } From 85748a75e99bfccf497c28f2fb8c9bc438d84a6f Mon Sep 17 00:00:00 2001 From: Xu Han Date: Wed, 6 Nov 2024 12:36:11 +0000 Subject: [PATCH 067/110] feat: add update for execution management --- source/lambda/etl/constant.py | 2 +- source/lambda/etl/execution_management.py | 90 +++++++++++++++++++++++ 2 files changed, 91 insertions(+), 1 deletion(-) diff --git a/source/lambda/etl/constant.py 
b/source/lambda/etl/constant.py index b97a7135b..480865d98 100644 --- a/source/lambda/etl/constant.py +++ b/source/lambda/etl/constant.py @@ -22,7 +22,7 @@ class ExecutionStatus(Enum): COMPLETED = "COMPLETED" DELETING = "DELETING" DELETED = "DELETED" - + UPDATING = "UPDATING" class EmbeddingModelType(Enum): BEDROCK_TITAN_V1 = "amazon.titan-embed-text-v1" diff --git a/source/lambda/etl/execution_management.py b/source/lambda/etl/execution_management.py index 71def2e85..6ea78e103 100644 --- a/source/lambda/etl/execution_management.py +++ b/source/lambda/etl/execution_management.py @@ -184,6 +184,78 @@ def get_filtered_executions( return output + @staticmethod + def update_execution(execution_id: str, updated_s3_bucket: str, updated_s3_prefix: str) -> Dict[str, Any]: + """Update execution details in DynamoDB + + Args: + execution_id: The ID of the execution to update + update_data: Dictionary containing fields to update + + Returns: + Updated execution item + + Raises: + ValueError: If execution not found or invalid update data + """ + # Verify execution exists + execution = ExecutionManager.get_execution(execution_id) + if not execution: + raise ValueError(f"Execution {execution_id} not found") + + ExecutionManager.update_execution_status(execution_id, ExecutionStatus.UPDATING.value, UiStatus.ACTIVE.value) + + existing_s3_bucket = execution["s3Bucket"] + existing_s3_prefix = execution["s3Prefix"] + + if existing_s3_bucket == updated_s3_bucket and existing_s3_prefix == updated_s3_prefix: + update_input = { + "s3Bucket": execution["s3Bucket"], + "s3Prefix": execution["s3Prefix"], + "chatbotId": execution["chatbotId"], + "indexType": execution["indexType"], + "operationType": OperationType.UPDATE.value, + "indexId": execution["indexId"], + "groupName": execution["groupName"], + "tableItemId": execution["executionId"], + "embeddingModelType": execution["embeddingModelType"], + "offline": "true", + } + else: + # Prepare deletion input for Step Function + deletion_input = 
{ + "s3Bucket": execution["s3Bucket"], + "s3Prefix": execution["s3Prefix"], + "chatbotId": execution["chatbotId"], + "indexType": execution["indexType"], + "operationType": OperationType.DELETE.value, + "indexId": execution["indexId"], + "groupName": execution["groupName"], + "tableItemId": execution["executionId"], + "embeddingModelType": execution["embeddingModelType"], + "offline": "true", + } + + aws_resources.sfn_client.start_execution(stateMachineArn=Config.SFN_ARN, input=json.dumps(deletion_input)) + + # Create new execution for the updated S3 bucket and prefix + update_execution_input = { + "s3Bucket": updated_s3_bucket, + "s3Prefix": updated_s3_prefix, + "chatbotId": execution["chatbotId"], + "indexType": execution["indexType"], + "operationType": OperationType.CREATE.value, + "indexId": execution["indexId"], + "groupName": execution["groupName"], + "tableItemId": execution["executionId"], + "embeddingModelType": execution["embeddingModelType"], + "offline": "true", + } + + aws_resources.sfn_client.start_execution(stateMachineArn=Config.SFN_ARN, input=json.dumps(update_execution_input)) + + return response.get("Attributes", {}) + class ApiResponse: """Standardized API response handler""" @@ -247,6 +319,23 @@ def list_executions(event: Dict) -> Dict: except Exception as e: return ApiResponse.error(str(e)) + @staticmethod + def update_execution(event: Dict) -> Dict: + """Handle PUT /executions/{executionId} endpoint""" + try: + event_body = json.loads(event["body"]) + execution_id = event_body["executionId"] + updated_s3_bucket = event_body["s3Bucket"] + updated_s3_prefix = event_body["s3Prefix"] + + updated_execution = ExecutionManager.update_execution(execution_id, update_data) + return ApiResponse.success(updated_execution) + + except ValueError as ve: + return ApiResponse.error(str(ve), 400) + except Exception as e: + return ApiResponse.error(str(e)) + def lambda_handler(event: Dict, context: Any) -> Dict: """Routes API requests to appropriate handlers 
based on HTTP method and path""" @@ -256,6 +345,7 @@ def lambda_handler(event: Dict, context: Any) -> Dict: ("DELETE", "/knowledge-base/executions"): ApiHandler.delete_executions, ("GET", "/knowledge-base/executions/{executionId}"): ApiHandler.get_execution_objects, ("GET", "/knowledge-base/executions"): ApiHandler.list_executions, + ("PUT", "/knowledge-base/executions/{executionId}"): ApiHandler.update_execution, } handler = routes.get((event["httpMethod"], event["resource"])) From afd7c1bd576381d3f51cb12419e80afdfd09ee8f Mon Sep 17 00:00:00 2001 From: Xu Han Date: Wed, 6 Nov 2024 12:54:59 +0000 Subject: [PATCH 068/110] fix: fix execution notification issue --- source/lambda/etl/execution_management.py | 53 ++++++++++++----------- source/lambda/etl/notification.py | 11 ++++- 2 files changed, 38 insertions(+), 26 deletions(-) diff --git a/source/lambda/etl/execution_management.py b/source/lambda/etl/execution_management.py index 6ea78e103..fb1fc65f6 100644 --- a/source/lambda/etl/execution_management.py +++ b/source/lambda/etl/execution_management.py @@ -8,12 +8,11 @@ import logging import os from dataclasses import dataclass -from typing import Any, Dict, Iterator, List, Optional +from typing import Any, Dict, List, Optional import boto3 from boto3.dynamodb.conditions import Key from botocore.paginate import TokenEncoder -from botocore.paginator import Paginator from constant import ExecutionStatus, OperationType, UiStatus # Configure logging @@ -143,7 +142,7 @@ def delete_execution(execution_id: str) -> None: @staticmethod def get_filtered_executions( - paginator: Paginator, cognito_groups: List[str], pagination_config: Dict[str, Any] + paginator, cognito_groups: List[str], pagination_config: Dict[str, Any] ) -> Dict[str, Any]: """Get filtered executions based on user groups""" if "Admin" in cognito_groups: @@ -155,7 +154,7 @@ def get_filtered_executions( ) else: response_iterator = paginator.paginate( - TableName=Config.EXECUTION_TABLE_NAME, + 
TableName=Config.EXECUTION_TABLE_NAME, PaginationConfig=pagination_config, FilterExpression="uiStatus = :active AND groupName = :group_id", ExpressionAttributeValues={ @@ -185,16 +184,17 @@ def get_filtered_executions( return output @staticmethod - def update_execution(execution_id: str, updated_s3_bucket: str, updated_s3_prefix: str) -> Dict[str, Any]: + def update_execution(execution_id: str, update_s3_bucket: str, update_s3_prefix: str) -> Dict[str, Any]: """Update execution details in DynamoDB - + Args: execution_id: The ID of the execution to update - update_data: Dictionary containing fields to update - + update_s3_bucket: The new S3 bucket + update_s3_prefix: The new S3 prefix + Returns: Updated execution item - + Raises: ValueError: If execution not found or invalid update data """ @@ -202,14 +202,14 @@ def update_execution(execution_id: str, updated_s3_bucket: str, updated_s3_prefi execution = ExecutionManager.get_execution(execution_id) if not execution: raise ValueError(f"Execution {execution_id} not found") - + ExecutionManager.update_execution_status(execution_id, ExecutionStatus.UPDATING.value, UiStatus.ACTIVE.value) existing_s3_bucket = execution["s3Bucket"] existing_s3_prefix = execution["s3Prefix"] - if existing_s3_bucket == updated_s3_bucket and existing_s3_prefix == updated_s3_prefix: - update_input = { + if existing_s3_bucket == update_s3_bucket and existing_s3_prefix == update_s3_prefix: + update_execution_input = { "s3Bucket": execution["s3Bucket"], "s3Prefix": execution["s3Prefix"], "chatbotId": execution["chatbotId"], @@ -221,6 +221,10 @@ def update_execution(execution_id: str, updated_s3_bucket: str, updated_s3_prefi "embeddingModelType": execution["embeddingModelType"], "offline": "true", } + + aws_resources.sfn_client.start_execution( + stateMachineArn=Config.SFN_ARN, input=json.dumps(update_execution_input) + ) else: # Prepare deletion input for Step Function deletion_input = { @@ -240,8 +244,8 @@ def update_execution(execution_id: str, 
updated_s3_bucket: str, updated_s3_prefi # Create new execution for the updated S3 bucket and prefix update_execution_input = { - "s3Bucket": updated_s3_bucket, - "s3Prefix": updated_s3_prefix, + "s3Bucket": update_s3_bucket, + "s3Prefix": update_s3_prefix, "chatbotId": execution["chatbotId"], "indexType": execution["indexType"], "operationType": OperationType.CREATE.value, @@ -252,9 +256,11 @@ def update_execution(execution_id: str, updated_s3_bucket: str, updated_s3_prefi "offline": "true", } - aws_resources.sfn_client.start_execution(stateMachineArn=Config.SFN_ARN, input=json.dumps(update_execution_input)) - - return response.get("Attributes", {}) + aws_resources.sfn_client.start_execution( + stateMachineArn=Config.SFN_ARN, input=json.dumps(update_execution_input) + ) + + return {"Message": "Update process initiated"} class ApiResponse: @@ -273,9 +279,6 @@ def error(message: str, status_code: int = 500) -> Dict: class ApiHandler: """API endpoint handlers""" - def __init__(self): - self.execution_service = ExecutionService(aws_resources.dynamodb_client) - @staticmethod def delete_executions(event: Dict) -> Dict: """Handle DELETE /executions endpoint""" @@ -325,12 +328,12 @@ def update_execution(event: Dict) -> Dict: try: event_body = json.loads(event["body"]) execution_id = event_body["executionId"] - updated_s3_bucket = event_body["s3Bucket"] - updated_s3_prefix = event_body["s3Prefix"] - - updated_execution = ExecutionManager.update_execution(execution_id, update_data) + update_s3_bucket = event_body["s3Bucket"] + update_s3_prefix = event_body["s3Prefix"] + + updated_execution = ExecutionManager.update_execution(execution_id, update_s3_bucket, update_s3_prefix) return ApiResponse.success(updated_execution) - + except ValueError as ve: return ApiResponse.error(str(ve), 400) except Exception as e: diff --git a/source/lambda/etl/notification.py b/source/lambda/etl/notification.py index f92066c2f..b60df7025 100644 --- a/source/lambda/etl/notification.py +++ 
b/source/lambda/etl/notification.py @@ -11,6 +11,11 @@ execution_table = dynamodb.Table(os.environ.get("EXECUTION_TABLE")) +def get_execution_item(execution_id): + response = execution_table.get_item(Key={"executionId": execution_id}) + return response.get("Item", {}) + + def update_execution_item(execution_id, execution_status, ui_status): """ Update the status of an execution item in DynamoDB. @@ -39,9 +44,13 @@ def lambda_handler(event, context): message = json.loads(event["Records"][0]["Sns"]["Message"]) execution_id = message["executionId"] + + current_execution = get_execution_item(execution_id) + current_execution_status = current_execution["executionStatus"] operation_type = message["operationType"] if operation_type == OperationType.DELETE.value: - update_execution_item(execution_id, ExecutionStatus.DELETED.value, UiStatus.INACTIVE.value) + if current_execution_status == ExecutionStatus.DELETING.value: + update_execution_item(execution_id, ExecutionStatus.DELETING.value, UiStatus.INACTIVE.value) else: update_execution_item(execution_id, ExecutionStatus.COMPLETED.value, UiStatus.ACTIVE.value) From faf5d64d40ba9dc750fc4c9557584b918b33a9d9 Mon Sep 17 00:00:00 2001 From: Xu Han Date: Wed, 6 Nov 2024 13:21:13 +0000 Subject: [PATCH 069/110] feat: support edit and delete on front end --- source/portal/src/pages/library/Library.tsx | 33 ++++++++++++++------- 1 file changed, 22 insertions(+), 11 deletions(-) diff --git a/source/portal/src/pages/library/Library.tsx b/source/portal/src/pages/library/Library.tsx index 3e5762b75..4bce6fb6e 100644 --- a/source/portal/src/pages/library/Library.tsx +++ b/source/portal/src/pages/library/Library.tsx @@ -3,6 +3,7 @@ import CommonLayout from 'src/layout/CommonLayout'; import { Box, Button, + ButtonDropdown, CollectionPreferences, ContentLayout, Header, @@ -27,7 +28,7 @@ const parseDate = (item: LibraryListItem) => { const Library: React.FC = () => { const [selectedItems, setSelectedItems] = useState([]); const fetchData = 
useAxiosRequest(); - const [visible, setVisible] = useState(false); + const [showDelete, setShowDelete] = useState(false); const { t } = useTranslation(); const [loadingData, setLoadingData] = useState(false); const [allLibraryList, setAllLibraryList] = useState([]); @@ -81,7 +82,7 @@ const Library: React.FC = () => { method: 'delete', data: { executionId: selectedItems.map((item) => item.executionId) }, }); - setVisible(false); + setShowDelete(false); getLibraryList(); alertMsg(data.message, 'success'); setLoadingDelete(false); @@ -284,14 +285,24 @@ const Library: React.FC = () => { getLibraryList(); }} /> - + {t('button.action')} + + + } + > + {t('indexList')} + + } + > + { + if(item.status === "old"){ + return item.name + } else { + return () + } + }, + sortingField: "name", + isRowHeader: true, + }, + { + id: "type", + header: t('indexType'), + cell: item => { + if(item.status === "old"){ + return item.type + } else { + return () + } + }, + sortingField: "type" + }, + { + id: "desc", + header: t('desc'), + cell: item => { + if(item.status === "old"){ + return item.description + } else { + return () + } + }, + editConfig: { + ariaLabel: "Name", + editIconAriaLabel: "editable", + errorIconAriaLabel: "Name Error", + editingCell: ( + item, + { currentValue, setValue } + ) => { + return ( + + setValue(event.detail.value) + } + /> + ); + }, + } + }, + { + id: "tag", + header: t('tag'), + cell: item => { + if(item.status === "old"){ + return item.tag + } else { + return () + } + }, + } + ]} + columnDisplay={[ + { id: "name", visible: true }, + { id: "type", visible: true }, + { id: "desc", visible: true }, + { id: "tag", visible: true } + ]} + enableKeyboardNavigation + items={tableIndexList||[]} + loadingText="Loading resources" + stickyHeader + trackBy="name" + sortingDisabled + empty={ + + + {t('empty')} + + + } + filter={ + setSearchIndexName(detail.filteringText)} + /> + } + pagination={ + setCurrentPage(detail.currentPageIndex)} + /> + } + preferences={ + + } + 
/> + +
+ {/*
í */} +
+ + + + +
+
+ + + + + ); +}; +export default ChatbotDetail; diff --git a/source/portal/src/pages/chatbotManagement/ChatbotManagement.tsx b/source/portal/src/pages/chatbotManagement/ChatbotManagement.tsx index 445725598..b7ab9ebca 100644 --- a/source/portal/src/pages/chatbotManagement/ChatbotManagement.tsx +++ b/source/portal/src/pages/chatbotManagement/ChatbotManagement.tsx @@ -4,7 +4,7 @@ import { Alert, Box, Button, - // ButtonDropdown, + ButtonDropdown, CollectionPreferences, ContentLayout, FormField, @@ -18,24 +18,57 @@ import { SpaceBetween, Table, Toggle, + Link } from '@cloudscape-design/components'; import { - // CreateChatbotResponse, ChatbotItem, ChatbotResponse, - // chatbotDetail, - CreEditChatbotResponse + CreEditChatbotResponse, + SelectedOption } from 'src/types'; import useAxiosRequest from 'src/hooks/useAxiosRequest'; import { useTranslation } from 'react-i18next'; import { formatTime } from 'src/utils/utils'; import ConfigContext from 'src/context/config-context'; import { EMBEDDING_MODEL_LIST } from 'src/utils/const'; +import { useNavigate } from 'react-router-dom'; +import RightModal from '../right-modal'; +import minus from 'src/assets/images/minus.png'; +import plus from 'src/assets/images/plus.png'; +import './style.scss'; + +interface INDEX_TYPE { + name:string, + type: string, + tag: string, + desc: string, + errText: string +} const ChatbotManagement: React.FC = () => { + const { t } = useTranslation(); +const INITIAL_INDEX_LIST: INDEX_TYPE[]=[{ + name: "", + type: "qq", + tag: "", + desc: t('defaultIndexDesc'), + errText: "" +},{ + name: "", + type: "qd", + tag: "", + desc: t('defaultIndexDesc'), + errText: "" +},{ + name: "", + type: "intention", + tag: "", + desc: t('defaultIndexDesc'), + errText: "" +}] const [selectedItems, setSelectedItems] = useState([]); const fetchData = useAxiosRequest(); - const { t } = useTranslation(); + const [loadingData, setLoadingData] = useState(false); const [allChatbotList, setAllChatbotList] = useState([]); const 
[tableChatbotList, setTableChatbotList] = useState([]); @@ -47,29 +80,31 @@ const ChatbotManagement: React.FC = () => { const [showEdit, setShowEdit] = useState(false); const [loadingSave, setLoadingSave] = useState(false); const [modelList, setModelList] = useState([]); - // const [chatbotList, setChatbotList] = useState([]); const [modelOption, setModelOption] = useState<{label:string;value:string} | null>( null, ); - // const [chatbotOption, setChatbotOption] = useState( - // null, - // ); const [chatbotName, setChatbotName] = useState(''); const [chatbotNameError, setChatbotNameError] = useState(''); - - // const [loadingGet, setLoadingGet] = useState(false); // validation const [modelError, setModelError] = useState(''); - // const [chatbotError, setChatbotError] = useState(''); - const [showDelete, setShowDelete] = useState(false); const [useDefaultIndex, setUseDefaultIndex] = useState(true); const [qqIndex, setQqIndex] = useState(''); const [qdIndex, setQdIndex] = useState(''); const [intentionIndex, setIntentionIndex] = useState(''); + + const [qqIndexDesc, setQqIndexDesc] = useState(t('defaultIndexDesc')); + const [qdIndexDesc, setQdIndexDesc] = useState(t('defaultIndexDesc')); + const [intentionIndexDesc, setIntentionIndexDesc] = useState(t('defaultIndexDesc')); + const [qqIndexError, setQqIndexError] = useState(''); const [qdIndexError, setQdIndexError] = useState(''); const [intentionIndexError, setIntentionIndexError] = useState(''); + const [showCreateChatbot, setShowCreateChatbot] = useState(false); + const [indexList, setIndexList] = useState(INITIAL_INDEX_LIST) + + const indexTypeOption:SelectedOption[] =[{label: "qq", value: "qq"}, {label: "qd", value: "qd"}, {label: "intention", value: "intention"}] + const navigate = useNavigate(); const getModelList = async (type: 'create' | 'edit') => { const tempModels:{label: string; value:string}[] =[] @@ -77,8 +112,6 @@ const ChatbotManagement: React.FC = () => { {"model_id": config?.embeddingEndpoint || 
"", "model_name": "BCE_Embedding"}, ] let embedding_models = EMBEDDING_MODEL_LIST - - // Check if config?.embeddingEndpoint starts with "bce-embedding-and-bge-reranker" if (config?.embeddingEndpoint?.startsWith("bce-embedding-and-bge-reranker")) { embedding_models = [...BCE_EMBEDDING, ...EMBEDDING_MODEL_LIST] } @@ -137,56 +170,17 @@ const ChatbotManagement: React.FC = () => { setLoadingSave(false); } }; - const editChatbot = async ()=>{ - setLoadingSave(true) - if(!qqIndex?.trim()){ - setQqIndexError(t('validation.requiredIndexName')); - setLoadingSave(false) - return; - } - if(!qdIndex?.trim()){ - setQdIndexError(t('validation.requiredIndexName')); - setLoadingSave(false) - return; - } - if(!intentionIndex?.trim()){ - setIntentionIndexError(t('validation.requiredIndexName')); - setLoadingSave(false) - return; - } - const indexIsValid = await isValidChatbot('edit') - - if(!indexIsValid.result){ - if(indexIsValid.item=="qq") { - setQqIndexError(t('validation.repeatIndex')) - } else if(indexIsValid.item=="qd") { - setQdIndexError(t('validation.repeatIndex')) - } else if(indexIsValid.item=="intention") { - setIntentionIndexError(t('validation.repeatIndex')) - } - setLoadingSave(false) - return; - } + const removeIndex =(removedIndex: number)=>{ + setIndexList(prevIndexList => + prevIndexList.filter((_, index) => index !== removedIndex) + ); - const editRes: CreEditChatbotResponse = await fetchData({ - url: `chatbot-management/chatbot/${selectedItems[0].ChatbotId}`, - method: 'post', - data: { - index: { - qq: qqIndex, - qd: qdIndex, - intention: intentionIndex - } - } - }); + } - if (editRes.Message === 'OK') { - setShowCreate(false); - setShowEdit(false); - getChatbotList(); - } - setLoadingSave(false); + const addIndex =()=>{ + setIndexList(prevIndexList => [...prevIndexList, {name:"", type:"qq", desc:t('defaultIndexDesc'), tag:"", errText:""}] + ); } const isValidChatbot = async (type:string) =>{ @@ -197,13 +191,46 @@ const ChatbotManagement: React.FC = () => { 
type, chatbotId: chatbotName, // groupName: selectedBotOption?.value, - index: {qq: qqIndex, qd: qdIndex, intention: intentionIndex}, + index: genBotIndexCheck(), model: modelOption?.value }, }); // return } + + const genBotIndexCheck = ()=>{ + let index:any={} + indexList.map((item: INDEX_TYPE)=>{ + if (!index[item.type]) { + index[item.type] = ""; + } + index[item.type] += item.name + ","; + }); + for (let type in index) { + index[type] = index[type].slice(0, -1); + } + + return index + } + + const genBotIndexCreate = ()=>{ + // index:{ + // qq: {name: qqIndex, desc: qqIndexDesc}, + // qd: {name: qdIndex, desc: qdIndexDesc}, + // intention: {name: intentionIndex, desc: intentionIndexDesc} + // } + let index:any={} + indexList.map((item: INDEX_TYPE)=>{ + if (!index[item.type]) { + index[item.type] = {}; + } + index[item.type][item.name] = item.desc; + }); + return index + } const createChatbot = async () => { + + let staticCheck = true // validate model settings if (!modelOption?.value?.trim()) { setModelError(t('validation.requireModel')); @@ -215,20 +242,37 @@ const ChatbotManagement: React.FC = () => { return; } + + if(!useDefaultIndex){ - if(!qqIndex?.trim()){ - setQqIndexError(t('validation.requiredIndexName')); - return; - } - if(!qdIndex?.trim()){ - setQdIndexError(t('validation.requiredIndexName')); - return; - } - if(!intentionIndex?.trim()){ - setIntentionIndexError(t('validation.requiredIndexName')); - return; - } + const validIndexNames: string[] = [] + setIndexList((prevIndexList) => + prevIndexList.map((item) => { + if(item.name?.trim().length === 0){ + staticCheck = false + return { + ...item, + errText:t('validation.requiredIndexName') + }; + } else if(validIndexNames.includes(item.name)) { + staticCheck = false + return { + ...item, + errText:t('validation.repeatedIndexName') + }; + } else { + validIndexNames.push(item.name) + return item + } + }) + ); + if(!staticCheck) return; + + + } + + setLoadingSave(true); @@ -237,20 +281,17 @@ const 
ChatbotManagement: React.FC = () => { if(!indexIsValid.result){ if(indexIsValid.item=="chatbotName"){ setChatbotNameError(t('validation.repeatChatbotName')) - } else if(indexIsValid.item=="qq") { - setQqIndexError(t('validation.repeatIndex')) - } else if(indexIsValid.item=="qd") { - setQdIndexError(t('validation.repeatIndex')) - } else if(indexIsValid.item=="intention") { - setIntentionIndexError(t('validation.repeatIndex')) + } else { + setIndexList((prevIndexList) => + prevIndexList.map((item) => { + return item.name == indexIsValid.item ? { ...item, errText: indexIsValid.reason==1?t('indexUsedByBot'):t('indexUsedByModel') } : item; + }) + ); } setLoadingSave(false) return; } try { - // if (type === 'create' && currentChatbot) { - // currentChatbot.ChatbotId = createChatbotId; - // } const createRes: CreEditChatbotResponse = await fetchData({ url: 'chatbot-management/chatbots', method: 'post', @@ -258,11 +299,8 @@ const ChatbotManagement: React.FC = () => { chatbotId: chatbotName, modelId: modelOption.value, modelName: modelOption.label, - index:{ - qq: qqIndex, - qd: qdIndex, - intention: intentionIndex - } + index: genBotIndexCreate() + }, }); // const createRes: CreateChatbotResponse = data; @@ -277,19 +315,6 @@ const ChatbotManagement: React.FC = () => { } }; - // const handleChatbotChange = (key: string, subKey: string, value: string) => { - // setCurrentChatbot((prevData: any) => ({ - // ...prevData, - // Chatbot: { - // ...prevData.Chatbot, - // [key]: { - // ...prevData.Chatbot[key], - // [subKey]: value, - // }, - // }, - // })); - // }; - useEffect(() => { getChatbotList(); }, []); @@ -303,10 +328,23 @@ const ChatbotManagement: React.FC = () => { useEffect(()=>{ if(chatbotName?.trim()!==""){ if(useDefaultIndex){ - setQdIndex(`${chatbotName}-qd-default`); - setQqIndex(`${chatbotName}-qq-default`); - setIntentionIndex(`${chatbotName}-intention-default`); - }} + setIndexList( + // prevIndexList => + INITIAL_INDEX_LIST.map(item => ({ + ...item, + name: 
`${chatbotName}-${item.type}-default`, + })) + ); + // setQdIndex(`${chatbotName}-qd-default`); + // setQqIndex(`${chatbotName}-qq-default`); + // setIntentionIndex(`${chatbotName}-intention-default`); + // setQdIndexDesc(t('defaultIndexDesc')); + // setQqIndexDesc(t('defaultIndexDesc')); + // setIntentionIndexDesc(t('defaultIndexDesc')); + } + } else{ + setIndexList(INITIAL_INDEX_LIST) + } setQdIndexError(''); setQqIndexError(''); @@ -315,11 +353,43 @@ const ChatbotManagement: React.FC = () => { ,[chatbotName, useDefaultIndex]) - // useEffect(() => { - // if (showCreate && modelOption) { - // getChatbotById('create'); - // } - // }, [modelOption]); + + const changeIndexName =(value: string, index: number)=>{ + setIndexList(prevIndexList => + prevIndexList.map((item,i) => { + if(i===index){return { + ...item, + name: value, + errText:'' + }} else { + return item + }}) + ); + } + + const changeIndexType =(value: string, index: number)=>{ + setIndexList(prevIndexList => + prevIndexList.map((item,i) => { + if(i===index){return { + ...item, + type: value + }} else { + return item + }}) + ); + } + + const changeIndexDesc =(value: string, index: number)=>{ + setIndexList(prevIndexList => + prevIndexList.map((item,i) => { + if(i===index){return { + ...item, + desc: value + }} else { + return item + }}) + ); + } return ( { getChatbotList(); }} /> - {/* { - if (detail.id === 'delete') { - setShowDelete(true); - } + // if (detail.id === 'delete') { + // setShowDelete(true); + // } if (detail.id === 'edit') { - getChatbotById(); + // getChatbotById(); + navigate(`/chatbot/detail/${selectedItems[0].ChatbotId}`) + } }} items={[ { text: t('button.edit'), id: 'edit' }, - { text: t('button.delete'), id: 'delete'}, + // { text: t('button.delete'), id: 'delete'}, ]} > {t('button.action')} - */} + - {showEdit ? ( - - ) : ( - - )} - - - } - header={showCreate?t('button.createChatbot'):t('button.editChatbot')} - > - + +
+ + + + +
+ } + > +
+ { onChange={({ detail }) => { setChatbotNameError(''); setChatbotName(detail.value); - }} /> - {/*