From f37a1f2a81ab7365623cad707b8f36a58c83546b Mon Sep 17 00:00:00 2001 From: Shaojun Liu <61072813+liu-shaojun@users.noreply.github.com> Date: Tue, 9 Apr 2024 17:41:17 +0800 Subject: [PATCH] Upgrade to python 3.11 (#10711) * create conda env with python 3.11 * recommend to use Python 3.11 * update --- .../source/doc/LLM/Overview/install_cpu.md | 6 ++--- .../source/doc/LLM/Overview/install_gpu.md | 24 +++++++++---------- .../doc/LLM/Quickstart/continue_quickstart.md | 2 +- .../doc/LLM/Quickstart/install_linux_gpu.md | 2 +- .../doc/LLM/Quickstart/install_windows_gpu.md | 2 +- .../LLM/Quickstart/llama_cpp_quickstart.md | 2 +- .../CPU/Applications/autogen/README.md | 2 +- .../CPU/Applications/hf-agent/README.md | 2 +- .../CPU/Applications/streaming-llm/README.md | 2 +- .../example/CPU/Deepspeed-AutoTP/README.md | 2 +- .../Advanced-Quantizations/AWQ/README.md | 2 +- .../Advanced-Quantizations/GGUF/README.md | 2 +- .../Advanced-Quantizations/GPTQ/README.md | 2 +- .../Model/aquila/README.md | 2 +- .../Model/aquila2/README.md | 2 +- .../Model/baichuan/README.md | 2 +- .../Model/baichuan2/README.md | 2 +- .../Model/bluelm/README.md | 2 +- .../Model/chatglm/README.md | 2 +- .../Model/chatglm2/README.md | 4 ++-- .../Model/chatglm3/README.md | 4 ++-- .../Model/codellama/README.md | 2 +- .../Model/codeshell/README.md | 2 +- .../Model/deciLM-7b/README.md | 2 +- .../Model/deepseek-moe/README.md | 2 +- .../Model/deepseek/README.md | 2 +- .../Model/distil-whisper/README.md | 2 +- .../Model/dolly_v1/README.md | 2 +- .../Model/dolly_v2/README.md | 2 +- .../Model/falcon/README.md | 2 +- .../Model/flan-t5/README.md | 2 +- .../Model/fuyu/README.md | 2 +- .../Model/gemma/README.md | 4 ++-- .../Model/internlm-xcomposer/README.md | 2 +- .../Model/internlm/README.md | 2 +- .../Model/internlm2/README.md | 2 +- .../Model/llama2/README.md | 2 +- .../Model/mistral/README.md | 2 +- .../Model/mixtral/README.md | 2 +- .../Model/moss/README.md | 2 +- .../Model/mpt/README.md | 2 +- 
.../Model/phi-1_5/README.md | 2 +- .../Model/phi-2/README.md | 2 +- .../Model/phixtral/README.md | 2 +- .../Model/phoenix/README.md | 2 +- .../Model/qwen-vl/README.md | 2 +- .../Model/qwen/README.md | 2 +- .../Model/qwen1.5/README.md | 2 +- .../Model/redpajama/README.md | 2 +- .../Model/replit/README.md | 2 +- .../Model/skywork/README.md | 2 +- .../Model/solar/README.md | 2 +- .../Model/stablelm/README.md | 2 +- .../Model/starcoder/README.md | 2 +- .../Model/vicuna/README.md | 2 +- .../Model/whisper/readme.md | 4 ++-- .../Model/wizardcoder-python/README.md | 2 +- .../Model/yi/README.md | 2 +- .../Model/yuan2/README.md | 2 +- .../Model/ziya/README.md | 2 +- .../More-Data-Types/README.md | 2 +- .../Save-Load/README.md | 2 +- .../example/CPU/ModelScope-Models/README.md | 2 +- .../llm/example/CPU/Native-Models/README.md | 2 +- .../PyTorch-Models/Model/aquila2/README.md | 2 +- .../CPU/PyTorch-Models/Model/bark/README.md | 2 +- .../CPU/PyTorch-Models/Model/bert/README.md | 2 +- .../CPU/PyTorch-Models/Model/bluelm/README.md | 2 +- .../PyTorch-Models/Model/chatglm/README.md | 2 +- .../PyTorch-Models/Model/chatglm3/README.md | 2 +- .../PyTorch-Models/Model/codellama/README.md | 2 +- .../PyTorch-Models/Model/codeshell/README.md | 2 +- .../PyTorch-Models/Model/deciLM-7b/README.md | 2 +- .../Model/deepseek-moe/README.md | 2 +- .../PyTorch-Models/Model/deepseek/README.md | 2 +- .../Model/distil-whisper/README.md | 2 +- .../PyTorch-Models/Model/flan-t5/README.md | 2 +- .../CPU/PyTorch-Models/Model/fuyu/README.md | 2 +- .../Model/internlm-xcomposer/README.md | 2 +- .../PyTorch-Models/Model/internlm2/README.md | 2 +- .../CPU/PyTorch-Models/Model/llama2/README.md | 2 +- .../CPU/PyTorch-Models/Model/llava/README.md | 2 +- .../CPU/PyTorch-Models/Model/mamba/README.md | 2 +- .../PyTorch-Models/Model/meta-llama/README.md | 2 +- .../PyTorch-Models/Model/mistral/README.md | 2 +- .../PyTorch-Models/Model/mixtral/README.md | 2 +- .../Model/openai-whisper/readme.md | 2 +- 
.../PyTorch-Models/Model/phi-1_5/README.md | 2 +- .../CPU/PyTorch-Models/Model/phi-2/README.md | 2 +- .../PyTorch-Models/Model/phixtral/README.md | 2 +- .../PyTorch-Models/Model/qwen-vl/README.md | 2 +- .../PyTorch-Models/Model/qwen1.5/README.md | 2 +- .../PyTorch-Models/Model/skywork/README.md | 2 +- .../CPU/PyTorch-Models/Model/solar/README.md | 2 +- .../PyTorch-Models/Model/stablelm/README.md | 2 +- .../Model/wizardcoder-python/README.md | 2 +- .../CPU/PyTorch-Models/Model/yi/README.md | 2 +- .../CPU/PyTorch-Models/Model/yuan2/README.md | 2 +- .../CPU/PyTorch-Models/Model/ziya/README.md | 2 +- .../PyTorch-Models/More-Data-Types/README.md | 2 +- .../CPU/PyTorch-Models/Save-Load/README.md | 2 +- .../example/CPU/QLoRA-FineTuning/README.md | 2 +- .../QLoRA-FineTuning/alpaca-qlora/README.md | 2 +- .../Speculative-Decoding/baichuan2/README.md | 2 +- .../Speculative-Decoding/chatglm3/README.md | 2 +- .../CPU/Speculative-Decoding/llama2/README.md | 2 +- .../Speculative-Decoding/mistral/README.md | 2 +- .../CPU/Speculative-Decoding/qwen/README.md | 2 +- .../Speculative-Decoding/starcoder/README.md | 2 +- .../CPU/Speculative-Decoding/vicuna/README.md | 2 +- .../CPU/Speculative-Decoding/ziya/README.md | 2 +- python/llm/example/CPU/vLLM-Serving/README.md | 2 +- .../GPU/Applications/autogen/README.md | 2 +- .../GPU/Applications/streaming-llm/README.md | 2 +- .../example/GPU/Deepspeed-AutoTP/README.md | 2 +- .../Advanced-Quantizations/AWQ/README.md | 2 +- .../Advanced-Quantizations/GGUF-IQ2/README.md | 2 +- .../Advanced-Quantizations/GGUF/README.md | 2 +- .../Advanced-Quantizations/GPTQ/README.md | 2 +- .../Model/aquila/README.md | 4 ++-- .../Model/aquila2/README.md | 4 ++-- .../Model/baichuan/README.md | 4 ++-- .../Model/baichuan2/README.md | 4 ++-- .../Model/bluelm/README.md | 4 ++-- .../Model/chatglm2/README.md | 8 +++---- .../Model/chatglm3/README.md | 8 +++---- .../Model/chinese-llama2/README.md | 4 ++-- .../Model/codellama/readme.md | 4 ++-- 
.../Model/deciLM-7b/README.md | 4 ++-- .../Model/deepseek/README.md | 4 ++-- .../Model/distil-whisper/README.md | 4 ++-- .../Model/dolly-v1/README.md | 4 ++-- .../Model/dolly-v2/README.md | 4 ++-- .../Model/falcon/README.md | 4 ++-- .../Model/flan-t5/README.md | 4 ++-- .../Model/gemma/README.md | 4 ++-- .../Model/gpt-j/readme.md | 4 ++-- .../Model/internlm/README.md | 4 ++-- .../Model/internlm2/README.md | 4 ++-- .../Model/llama2/README.md | 4 ++-- .../Model/mistral/README.md | 4 ++-- .../Model/mixtral/README.md | 4 ++-- .../Model/mpt/README.md | 4 ++-- .../Model/phi-1_5/README.md | 4 ++-- .../Model/phi-2/README.md | 4 ++-- .../Model/phixtral/README.md | 4 ++-- .../Model/qwen-vl/README.md | 4 ++-- .../Model/qwen/README.md | 4 ++-- .../Model/qwen1.5/README.md | 4 ++-- .../Model/redpajama/README.md | 4 ++-- .../Model/replit/README.md | 4 ++-- .../Model/rwkv4/README.md | 4 ++-- .../Model/rwkv5/README.md | 4 ++-- .../Model/solar/README.md | 4 ++-- .../Model/stablelm/README.md | 4 ++-- .../Model/starcoder/readme.md | 4 ++-- .../Model/vicuna/README.md | 4 ++-- .../Model/voiceassistant/README.md | 4 ++-- .../Model/whisper/readme.md | 4 ++-- .../Model/yi/README.md | 4 ++-- .../Model/yuan2/README.md | 4 ++-- .../More-Data-Types/README.md | 2 +- .../Save-Load/README.md | 4 ++-- .../example/GPU/LLM-Finetuning/DPO/README.md | 2 +- .../GPU/LLM-Finetuning/HF-PEFT/README.md | 2 +- .../example/GPU/LLM-Finetuning/LoRA/README.md | 2 +- .../GPU/LLM-Finetuning/QA-LoRA/README.md | 2 +- .../QLoRA/alpaca-qlora/README.md | 2 +- .../QLoRA/simple-example/README.md | 2 +- .../QLoRA/trl-example/README.md | 2 +- .../GPU/LLM-Finetuning/ReLora/README.md | 2 +- .../GPU/Long-Context/LLaMA2-32K/README.md | 4 ++-- .../example/GPU/ModelScope-Models/README.md | 4 ++-- .../GPU/ModelScope-Models/Save-Load/README.md | 4 ++-- .../GPU/Pipeline-Parallel-Inference/README.md | 2 +- .../PyTorch-Models/Model/aquila2/README.md | 4 ++-- .../PyTorch-Models/Model/baichuan/README.md | 4 ++-- 
.../PyTorch-Models/Model/baichuan2/README.md | 4 ++-- .../GPU/PyTorch-Models/Model/bark/README.md | 4 ++-- .../GPU/PyTorch-Models/Model/bluelm/README.md | 4 ++-- .../PyTorch-Models/Model/chatglm2/README.md | 8 +++---- .../PyTorch-Models/Model/chatglm3/README.md | 8 +++---- .../PyTorch-Models/Model/codellama/README.md | 4 ++-- .../PyTorch-Models/Model/deciLM-7b/README.md | 4 ++-- .../PyTorch-Models/Model/deepseek/README.md | 4 ++-- .../Model/distil-whisper/README.md | 4 ++-- .../PyTorch-Models/Model/dolly-v1/README.md | 4 ++-- .../PyTorch-Models/Model/dolly-v2/README.md | 4 ++-- .../PyTorch-Models/Model/flan-t5/README.md | 4 ++-- .../PyTorch-Models/Model/internlm2/README.md | 4 ++-- .../GPU/PyTorch-Models/Model/llama2/README.md | 4 ++-- .../GPU/PyTorch-Models/Model/llava/README.md | 4 ++-- .../GPU/PyTorch-Models/Model/mamba/README.md | 2 +- .../PyTorch-Models/Model/mistral/README.md | 4 ++-- .../PyTorch-Models/Model/mixtral/README.md | 4 ++-- .../PyTorch-Models/Model/phi-1_5/README.md | 4 ++-- .../GPU/PyTorch-Models/Model/phi-2/README.md | 4 ++-- .../PyTorch-Models/Model/phixtral/README.md | 4 ++-- .../PyTorch-Models/Model/qwen-vl/README.md | 4 ++-- .../PyTorch-Models/Model/qwen1.5/README.md | 4 ++-- .../GPU/PyTorch-Models/Model/replit/README.md | 4 ++-- .../GPU/PyTorch-Models/Model/solar/README.md | 4 ++-- .../PyTorch-Models/Model/speech-t5/README.md | 4 ++-- .../PyTorch-Models/Model/stablelm/README.md | 4 ++-- .../PyTorch-Models/Model/starcoder/README.md | 4 ++-- .../GPU/PyTorch-Models/Model/yi/README.md | 4 ++-- .../GPU/PyTorch-Models/Model/yuan2/README.md | 4 ++-- .../PyTorch-Models/More-Data-Types/README.md | 2 +- .../GPU/PyTorch-Models/Save-Load/README.md | 2 +- .../Speculative-Decoding/baichuan2/README.md | 2 +- .../Speculative-Decoding/chatglm3/README.md | 2 +- .../GPU/Speculative-Decoding/gpt-j/README.md | 2 +- .../GPU/Speculative-Decoding/llama2/README.md | 2 +- .../Speculative-Decoding/mistral/README.md | 2 +- .../GPU/Speculative-Decoding/qwen/README.md | 
2 +- python/llm/example/GPU/vLLM-Serving/README.md | 2 +- python/llm/scripts/env-check.sh | 2 +- 217 files changed, 319 insertions(+), 319 deletions(-) diff --git a/docs/readthedocs/source/doc/LLM/Overview/install_cpu.md b/docs/readthedocs/source/doc/LLM/Overview/install_cpu.md index bb2b952ccdc..53342b77f51 100644 --- a/docs/readthedocs/source/doc/LLM/Overview/install_cpu.md +++ b/docs/readthedocs/source/doc/LLM/Overview/install_cpu.md @@ -17,7 +17,7 @@ Please refer to [Environment Setup](#environment-setup) for more information. .. important:: - ``ipex-llm`` is tested with Python 3.9, 3.10 and 3.11; Python 3.9 is recommended for best practices. + ``ipex-llm`` is tested with Python 3.9, 3.10 and 3.11; Python 3.11 is recommended for best practices. ``` ## Recommended Requirements @@ -39,10 +39,10 @@ Here list the recommended hardware and OS for smooth IPEX-LLM optimization exper For optimal performance with LLM models using IPEX-LLM optimizations on Intel CPUs, here are some best practices for setting up environment: -First we recommend using [Conda](https://docs.conda.io/en/latest/miniconda.html) to create a python 3.9 enviroment: +First we recommend using [Conda](https://docs.conda.io/en/latest/miniconda.html) to create a python 3.11 enviroment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/docs/readthedocs/source/doc/LLM/Overview/install_gpu.md b/docs/readthedocs/source/doc/LLM/Overview/install_gpu.md index 22f49e1f371..aead085168c 100644 --- a/docs/readthedocs/source/doc/LLM/Overview/install_gpu.md +++ b/docs/readthedocs/source/doc/LLM/Overview/install_gpu.md @@ -22,10 +22,10 @@ To apply Intel GPU acceleration, there're several prerequisite steps for tools i * Step 4: Install IntelĀ® oneAPI Base Toolkit 2024.0: - First, Create a Python 3.9 enviroment and activate it. 
In Anaconda Prompt: + First, create a Python 3.11 environment and activate it. In Anaconda Prompt: ```cmd - conda create -n llm python=3.9 libuv + conda create -n llm python=3.11 libuv conda activate llm ``` @@ -33,7 +33,7 @@ To apply Intel GPU acceleration, there're several prerequisite steps for tools i ```eval_rst .. important:: - ``ipex-llm`` is tested with Python 3.9, 3.10 and 3.11. Python 3.9 is recommended for best practices. + ``ipex-llm`` is tested with Python 3.9, 3.10 and 3.11. Python 3.11 is recommended for best practices. ``` Then, use `pip` to install the Intel oneAPI Base Toolkit 2024.0: @@ -111,7 +111,7 @@ pip install --pre --upgrade ipex-llm[xpu] ```eval_rst .. note:: - All the wheel packages mentioned here are for Python 3.9. If you would like to use Python 3.10 or 3.11, you should modify the wheel names for ``torch``, ``torchvision``, and ``intel_extension_for_pytorch`` by replacing ``cp39`` with ``cp310`` or ``cp311``, respectively. + All the wheel packages mentioned here are for Python 3.11. If you would like to use Python 3.9 or 3.10, you should modify the wheel names for ``torch``, ``torchvision``, and ``intel_extension_for_pytorch`` by replacing ``cp311`` with ``cp39`` or ``cp310``, respectively. ``` ### Runtime Configuration @@ -164,7 +164,7 @@ If you met error when importing `intel_extension_for_pytorch`, please ensure tha * Ensure that `libuv` is installed in your conda environment. 
This can be done during the creation of the environment with the command: ```cmd - conda create -n llm python=3.9 libuv + conda create -n llm python=3.11 libuv ``` If you missed `libuv`, you can add it to your existing environment through ```cmd @@ -399,12 +399,12 @@ IPEX-LLM GPU support on Linux has been verified on: ### Install IPEX-LLM #### Install IPEX-LLM From PyPI -We recommend using [miniconda](https://docs.conda.io/en/latest/miniconda.html) to create a python 3.9 enviroment: +We recommend using [miniconda](https://docs.conda.io/en/latest/miniconda.html) to create a python 3.11 enviroment: ```eval_rst .. important:: - ``ipex-llm`` is tested with Python 3.9, 3.10 and 3.11. Python 3.9 is recommended for best practices. + ``ipex-llm`` is tested with Python 3.9, 3.10 and 3.11. Python 3.11 is recommended for best practices. ``` ```eval_rst @@ -422,7 +422,7 @@ We recommend using [miniconda](https://docs.conda.io/en/latest/miniconda.html) t .. code-block:: bash - conda create -n llm python=3.9 + conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -439,7 +439,7 @@ We recommend using [miniconda](https://docs.conda.io/en/latest/miniconda.html) t .. code-block:: bash - conda create -n llm python=3.9 + conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/cn/ @@ -461,7 +461,7 @@ We recommend using [miniconda](https://docs.conda.io/en/latest/miniconda.html) t .. code-block:: bash - conda create -n llm python=3.9 + conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[xpu_2.0] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -470,7 +470,7 @@ We recommend using [miniconda](https://docs.conda.io/en/latest/miniconda.html) t .. 
code-block:: bash - conda create -n llm python=3.9 + conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[xpu_2.0] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/cn/ @@ -530,7 +530,7 @@ If you encounter network issues when installing IPEX, you can also install IPEX- ```eval_rst .. note:: - All the wheel packages mentioned here are for Python 3.9. If you would like to use Python 3.10 or 3.11, you should modify the wheel names for ``torch``, ``torchvision``, and ``intel_extension_for_pytorch`` by replacing ``cp39`` with ``cp310`` or ``cp311``, respectively. + All the wheel packages mentioned here are for Python 3.11. If you would like to use Python 3.9 or 3.10, you should modify the wheel names for ``torch``, ``torchvision``, and ``intel_extension_for_pytorch`` by replacing ``cp311`` with ``cp39`` or ``cp310``, respectively. ``` ### Runtime Configuration diff --git a/docs/readthedocs/source/doc/LLM/Quickstart/continue_quickstart.md b/docs/readthedocs/source/doc/LLM/Quickstart/continue_quickstart.md index 1f692f5b583..1370a233ee6 100644 --- a/docs/readthedocs/source/doc/LLM/Quickstart/continue_quickstart.md +++ b/docs/readthedocs/source/doc/LLM/Quickstart/continue_quickstart.md @@ -28,7 +28,7 @@ This guide walks you through setting up and running **Continue** within _Visual Visit [Run Text Generation WebUI Quickstart Guide](webui_quickstart.html), and follow the steps 1) [Install IPEX-LLM](https://ipex-llm.readthedocs.io/en/latest/doc/LLM/Quickstart/webui_quickstart.html#install-ipex-llm), 2) [Install WebUI](https://ipex-llm.readthedocs.io/en/latest/doc/LLM/Quickstart/webui_quickstart.html#install-the-webui) and 3) [Start the Server](https://ipex-llm.readthedocs.io/en/latest/doc/LLM/Quickstart/webui_quickstart.html#start-the-webui-server) to install and start the Text Generation WebUI API Service. 
**Please pay attention to below items during installation:** -- The Text Generation WebUI API service requires Python version 3.10 or higher. But [IPEX-LLM installation instructions](https://ipex-llm.readthedocs.io/en/latest/doc/LLM/Quickstart/webui_quickstart.html#install-ipex-llm) used ``python=3.9`` as default for creating the conda environment. We recommend changing it to ``3.11``, using below command: +- The Text Generation WebUI API service requires Python version 3.10 or higher. The [IPEX-LLM installation instructions](https://ipex-llm.readthedocs.io/en/latest/doc/LLM/Quickstart/webui_quickstart.html#install-ipex-llm) already use ``python=3.11`` as default for creating the conda environment, which meets this requirement: ```bash conda create -n llm python=3.11 libuv ``` diff --git a/docs/readthedocs/source/doc/LLM/Quickstart/install_linux_gpu.md b/docs/readthedocs/source/doc/LLM/Quickstart/install_linux_gpu.md index 157e03f4cd5..efcf95b1403 100644 --- a/docs/readthedocs/source/doc/LLM/Quickstart/install_linux_gpu.md +++ b/docs/readthedocs/source/doc/LLM/Quickstart/install_linux_gpu.md @@ -144,7 +144,7 @@ You can use `conda --version` to verify you conda installation. After installation, create a new python environment `llm`: ```cmd -conda create -n llm python=3.9 +conda create -n llm python=3.11 ``` Activate the newly created environment `llm`: ```cmd diff --git a/docs/readthedocs/source/doc/LLM/Quickstart/install_windows_gpu.md b/docs/readthedocs/source/doc/LLM/Quickstart/install_windows_gpu.md index 14439e70a1a..6a0c2e78baf 100644 --- a/docs/readthedocs/source/doc/LLM/Quickstart/install_windows_gpu.md +++ b/docs/readthedocs/source/doc/LLM/Quickstart/install_windows_gpu.md @@ -57,7 +57,7 @@ Visit [Miniconda installation page](https://docs.anaconda.com/free/miniconda/), Open the **Anaconda Prompt**. 
Then create a new python environment `llm` and activate it: ```cmd -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm ``` diff --git a/docs/readthedocs/source/doc/LLM/Quickstart/llama_cpp_quickstart.md b/docs/readthedocs/source/doc/LLM/Quickstart/llama_cpp_quickstart.md index 6c0c6784633..4736b6dc64c 100644 --- a/docs/readthedocs/source/doc/LLM/Quickstart/llama_cpp_quickstart.md +++ b/docs/readthedocs/source/doc/LLM/Quickstart/llama_cpp_quickstart.md @@ -26,7 +26,7 @@ Visit the [Install IPEX-LLM on Windows with Intel GPU Guide](https://ipex-llm.re To use `llama.cpp` with IPEX-LLM, first ensure that `ipex-llm[cpp]` is installed. ```cmd -conda create -n llm-cpp python=3.9 +conda create -n llm-cpp python=3.11 conda activate llm-cpp pip install --pre --upgrade ipex-llm[cpp] ``` diff --git a/python/llm/example/CPU/Applications/autogen/README.md b/python/llm/example/CPU/Applications/autogen/README.md index ceb9fd7a5e8..de04551089a 100644 --- a/python/llm/example/CPU/Applications/autogen/README.md +++ b/python/llm/example/CPU/Applications/autogen/README.md @@ -11,7 +11,7 @@ mkdir autogen cd autogen # create respective conda environment -conda create -n autogen python=3.9 +conda create -n autogen python=3.11 conda activate autogen # install fastchat-adapted ipex-llm diff --git a/python/llm/example/CPU/Applications/hf-agent/README.md b/python/llm/example/CPU/Applications/hf-agent/README.md index edbae072b01..455f10edda4 100644 --- a/python/llm/example/CPU/Applications/hf-agent/README.md +++ b/python/llm/example/CPU/Applications/hf-agent/README.md @@ -10,7 +10,7 @@ To run this example with IPEX-LLM, we have some recommended requirements for you ### 1. 
Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/Applications/streaming-llm/README.md b/python/llm/example/CPU/Applications/streaming-llm/README.md index a008b1d29e7..571f51a318f 100644 --- a/python/llm/example/CPU/Applications/streaming-llm/README.md +++ b/python/llm/example/CPU/Applications/streaming-llm/README.md @@ -10,7 +10,7 @@ model = AutoModelForCausalLM.from_pretrained(model_name_or_path, load_in_4bit=Tr ## Prepare Environment We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] diff --git a/python/llm/example/CPU/Deepspeed-AutoTP/README.md b/python/llm/example/CPU/Deepspeed-AutoTP/README.md index ed73856714a..4525656384a 100644 --- a/python/llm/example/CPU/Deepspeed-AutoTP/README.md +++ b/python/llm/example/CPU/Deepspeed-AutoTP/README.md @@ -2,7 +2,7 @@ #### 1. 
Install Dependencies -Install necessary packages (here Python 3.9 is our test environment): +Install necessary packages (here Python 3.11 is our test environment): ```bash bash install.sh diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Advanced-Quantizations/AWQ/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Advanced-Quantizations/AWQ/README.md index cecbe84aa60..b3078cbdb89 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Advanced-Quantizations/AWQ/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Advanced-Quantizations/AWQ/README.md @@ -34,7 +34,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a AWQ We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install autoawq==0.1.8 --no-deps diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Advanced-Quantizations/GGUF/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Advanced-Quantizations/GGUF/README.md index 33c28850cbc..4741e604a56 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Advanced-Quantizations/GGUF/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Advanced-Quantizations/GGUF/README.md @@ -25,7 +25,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Advanced-Quantizations/GPTQ/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Advanced-Quantizations/GPTQ/README.md index d91f997e5d2..139fa01431d 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Advanced-Quantizations/GPTQ/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Advanced-Quantizations/GPTQ/README.md @@ -9,7 +9,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Llam ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/aquila/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/aquila/README.md index 63468b19455..8b3cfbf37aa 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/aquila/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/aquila/README.md @@ -16,7 +16,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/aquila2/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/aquila2/README.md index 50e7b83dd0c..fd06613cbb0 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/aquila2/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/aquila2/README.md @@ -16,7 +16,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/baichuan/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/baichuan/README.md index b7ed859e44f..6b8d421d206 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/baichuan/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/baichuan/README.md @@ -9,7 +9,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Baic ### 1. 
Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/baichuan2/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/baichuan2/README.md index e5d9a1aad94..e9e2820048a 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/baichuan2/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/baichuan2/README.md @@ -9,7 +9,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Baic ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/bluelm/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/bluelm/README.md index addec52fd36..328a86b7d2d 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/bluelm/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/bluelm/README.md @@ -9,7 +9,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Blue ### 1. 
Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/chatglm/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/chatglm/README.md index d56d070ec4b..09172bcb6a4 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/chatglm/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/chatglm/README.md @@ -16,7 +16,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/chatglm2/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/chatglm2/README.md index 54acc3b6b79..8a99eebee29 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/chatglm2/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/chatglm2/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Chat ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install ipex-llm[all] # install ipex-llm with 'all' option @@ -80,7 +80,7 @@ In the example [streamchat.py](./streamchat.py), we show a basic use case for a ### 1. 
Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/chatglm3/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/chatglm3/README.md index 966f089484a..4b5f2174faa 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/chatglm3/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/chatglm3/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Chat ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install ipex-llm with 'all' option @@ -81,7 +81,7 @@ In the example [streamchat.py](./streamchat.py), we show a basic use case for a ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/codellama/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/codellama/README.md index be3687cfd1e..100350512c8 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/codellama/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/codellama/README.md @@ -9,7 +9,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Code ### 1. 
Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/codeshell/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/codeshell/README.md index 59c935c5adb..a3399ab85f1 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/codeshell/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/codeshell/README.md @@ -16,7 +16,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/deciLM-7b/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/deciLM-7b/README.md index 420627c5b6e..ac818695d0d 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/deciLM-7b/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/deciLM-7b/README.md @@ -9,7 +9,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Deci ### 1. 
Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/deepseek-moe/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/deepseek-moe/README.md index ece21c6f8c4..3fd87ae7d74 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/deepseek-moe/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/deepseek-moe/README.md @@ -16,7 +16,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/deepseek/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/deepseek/README.md index 232ca8be1db..e38600b7794 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/deepseek/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/deepseek/README.md @@ -9,7 +9,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Deep ### 1. 
Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/distil-whisper/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/distil-whisper/README.md index 882671c60bb..92d863b16c9 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/distil-whisper/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/distil-whisper/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/dolly_v1/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/dolly_v1/README.md index d59677ba3f4..1e599b4e4f5 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/dolly_v1/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/dolly_v1/README.md @@ -9,7 +9,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Doll ### 1. 
Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/dolly_v2/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/dolly_v2/README.md index 219e13ee677..b06f61cca12 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/dolly_v2/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/dolly_v2/README.md @@ -9,7 +9,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Doll ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/falcon/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/falcon/README.md index 20a19a76bee..ca7b5f4576c 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/falcon/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/falcon/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Falc ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/flan-t5/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/flan-t5/README.md index 2d102180afe..2daa684f6e9 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/flan-t5/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/flan-t5/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/fuyu/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/fuyu/README.md index e54a854698c..8bf15bd15ea 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/fuyu/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/fuyu/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/gemma/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/gemma/README.md index 548529c8a94..c8572e0431f 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/gemma/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/gemma/README.md @@ -14,7 +14,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -27,7 +27,7 @@ pip install transformers==4.38.1 #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/internlm-xcomposer/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/internlm-xcomposer/README.md index cb898b32c92..97235dd6f04 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/internlm-xcomposer/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/internlm-xcomposer/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/internlm/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/internlm/README.md index b37e342c1f2..e994db9e018 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/internlm/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/internlm/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Inte ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/internlm2/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/internlm2/README.md index c7d8022a29e..01f399b9005 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/internlm2/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/internlm2/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Inte ### 1. 
Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/llama2/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/llama2/README.md index 191102ebf34..68415979b55 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/llama2/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/llama2/README.md @@ -9,7 +9,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Llam ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/mistral/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/mistral/README.md index 40fbd43dd5c..d27fc1e776d 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/mistral/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/mistral/README.md @@ -13,7 +13,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/mixtral/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/mixtral/README.md index edd46b62203..0f9ce865b9c 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/mixtral/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/mixtral/README.md @@ -13,7 +13,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install PyTorch CPU as default diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/moss/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/moss/README.md index a0eeeccb4d1..0355daa9343 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/moss/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/moss/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a MOSS ### 1. 
Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/mpt/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/mpt/README.md index e70aa2ace80..5efb7172d9a 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/mpt/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/mpt/README.md @@ -9,7 +9,7 @@ In the example [generate.py](./generate.py), we show a basic use case for an MPT ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/phi-1_5/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/phi-1_5/README.md index 7d9ece5bf50..e92d306bcb0 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/phi-1_5/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/phi-1_5/README.md @@ -16,7 +16,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/phi-2/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/phi-2/README.md index caf033f3ba5..10cebf030bc 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/phi-2/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/phi-2/README.md @@ -16,7 +16,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/phixtral/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/phixtral/README.md index 918c081ab7b..2696aeb3838 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/phixtral/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/phixtral/README.md @@ -16,7 +16,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/phoenix/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/phoenix/README.md index 601eb997ebc..9b162d2f43e 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/phoenix/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/phoenix/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Phoe ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/qwen-vl/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/qwen-vl/README.md index bd1b66d486d..16f5243c0ca 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/qwen-vl/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/qwen-vl/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/qwen/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/qwen/README.md index ce689b6f3df..c94d76a3e53 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/qwen/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/qwen/README.md @@ -15,7 +15,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Qwen We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/qwen1.5/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/qwen1.5/README.md index 52037de5476..e4043709085 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/qwen1.5/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/qwen1.5/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Qwen ### 1. 
Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/redpajama/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/redpajama/README.md index 0692286f0db..0e9e0c381b2 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/redpajama/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/redpajama/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a RedP ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/replit/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/replit/README.md index 0ce3bbed85e..285b8040712 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/replit/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/replit/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/skywork/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/skywork/README.md index 53b790f27b5..75f81fd82d8 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/skywork/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/skywork/README.md @@ -9,7 +9,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Skyw ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/solar/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/solar/README.md index cdfe9b8fe01..51c1a6b6b99 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/solar/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/solar/README.md @@ -9,7 +9,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a SOLA ### 1. 
Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/stablelm/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/stablelm/README.md index 5d99e902c93..d3e9854a9fc 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/stablelm/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/stablelm/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/starcoder/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/starcoder/README.md index d81e438bbf9..20cc936fdc4 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/starcoder/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/starcoder/README.md @@ -9,7 +9,7 @@ In the example [generate.py](./generate.py), we show a basic use case for an Sta ### 1. 
Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/vicuna/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/vicuna/README.md index 89604bc6f86..9ed7ac159e7 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/vicuna/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/vicuna/README.md @@ -9,7 +9,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Vicu ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/whisper/readme.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/whisper/readme.md index d2e957e67cb..29f72a29c20 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/whisper/readme.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/whisper/readme.md @@ -10,7 +10,7 @@ In the example [recognize.py](./recognize.py), we show a basic use case for a Wh ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install ipex-llm[all] # install ipex-llm with 'all' option @@ -66,7 +66,7 @@ In the example [long-segment-recognize.py](./long-segment-recognize.py), we show ### 1. 
Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/wizardcoder-python/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/wizardcoder-python/README.md index 25d6f20e819..1801214a8a5 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/wizardcoder-python/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/wizardcoder-python/README.md @@ -9,7 +9,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Wiza ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/yi/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/yi/README.md index 2205a4aff14..829af83f782 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/yi/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/yi/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/yuan2/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/yuan2/README.md index 05f7a32f597..96c08614367 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/yuan2/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/yuan2/README.md @@ -13,7 +13,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/ziya/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/ziya/README.md index 2dfb7adc559..9d1fa08cd8c 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Model/ziya/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Model/ziya/README.md @@ -16,7 +16,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/More-Data-Types/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/More-Data-Types/README.md index d5dc789c024..93284b2ed1f 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/More-Data-Types/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/More-Data-Types/README.md @@ -5,7 +5,7 @@ In this example, we show a pipeline to apply IPEX-LLM low-bit optimizations (inc ## Prepare Environment We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] diff --git a/python/llm/example/CPU/HF-Transformers-AutoModels/Save-Load/README.md b/python/llm/example/CPU/HF-Transformers-AutoModels/Save-Load/README.md index d5dc789c024..93284b2ed1f 100644 --- a/python/llm/example/CPU/HF-Transformers-AutoModels/Save-Load/README.md +++ b/python/llm/example/CPU/HF-Transformers-AutoModels/Save-Load/README.md @@ -5,7 +5,7 @@ In this example, we show a pipeline to apply IPEX-LLM low-bit optimizations (inc ## Prepare Environment We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] diff --git a/python/llm/example/CPU/ModelScope-Models/README.md b/python/llm/example/CPU/ModelScope-Models/README.md index 8be1159d8bf..d416a8ea7f6 100644 --- a/python/llm/example/CPU/ModelScope-Models/README.md +++ b/python/llm/example/CPU/ModelScope-Models/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we 
show a basic use case for a Chat ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/Native-Models/README.md b/python/llm/example/CPU/Native-Models/README.md index 8a181ce6685..1a2d80a8bf7 100644 --- a/python/llm/example/CPU/Native-Models/README.md +++ b/python/llm/example/CPU/Native-Models/README.md @@ -7,7 +7,7 @@ In this example, we show a pipeline to convert a large language model to IPEX-LL ## Prepare Environment We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] diff --git a/python/llm/example/CPU/PyTorch-Models/Model/aquila2/README.md b/python/llm/example/CPU/PyTorch-Models/Model/aquila2/README.md index 67189d6074f..2c9cd0083f3 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/aquila2/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/aquila2/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/bark/README.md b/python/llm/example/CPU/PyTorch-Models/Model/bark/README.md index 5800acae5cb..ba4f4282e4c 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/bark/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/bark/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/bert/README.md b/python/llm/example/CPU/PyTorch-Models/Model/bert/README.md index 2bbbe6263c0..5bfe4e00f32 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/bert/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/bert/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/bluelm/README.md b/python/llm/example/CPU/PyTorch-Models/Model/bluelm/README.md index 437d9834d65..a68f2cb8990 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/bluelm/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/bluelm/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/chatglm/README.md b/python/llm/example/CPU/PyTorch-Models/Model/chatglm/README.md index 35a1562098e..be040a031cc 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/chatglm/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/chatglm/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/chatglm3/README.md b/python/llm/example/CPU/PyTorch-Models/Model/chatglm3/README.md index 195fb0ee936..3ee550a4eb6 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/chatglm3/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/chatglm3/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/codellama/README.md b/python/llm/example/CPU/PyTorch-Models/Model/codellama/README.md index a97c5bb8976..9915ffd9be4 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/codellama/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/codellama/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/codeshell/README.md b/python/llm/example/CPU/PyTorch-Models/Model/codeshell/README.md index 1870c4de351..dff6f8e87f1 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/codeshell/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/codeshell/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/deciLM-7b/README.md b/python/llm/example/CPU/PyTorch-Models/Model/deciLM-7b/README.md index 15dca0bd3a4..bf92a5b622a 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/deciLM-7b/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/deciLM-7b/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/deepseek-moe/README.md b/python/llm/example/CPU/PyTorch-Models/Model/deepseek-moe/README.md index feca7acf21d..fa9b99458a8 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/deepseek-moe/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/deepseek-moe/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/deepseek/README.md b/python/llm/example/CPU/PyTorch-Models/Model/deepseek/README.md index bbbb304e69a..9e86fa272c5 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/deepseek/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/deepseek/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/distil-whisper/README.md b/python/llm/example/CPU/PyTorch-Models/Model/distil-whisper/README.md index 56efd231f0d..ff777d77698 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/distil-whisper/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/distil-whisper/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/flan-t5/README.md b/python/llm/example/CPU/PyTorch-Models/Model/flan-t5/README.md index 2d102180afe..2daa684f6e9 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/flan-t5/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/flan-t5/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/fuyu/README.md b/python/llm/example/CPU/PyTorch-Models/Model/fuyu/README.md index e54a854698c..8bf15bd15ea 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/fuyu/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/fuyu/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/internlm-xcomposer/README.md b/python/llm/example/CPU/PyTorch-Models/Model/internlm-xcomposer/README.md index cedaab0494b..eda342d80c5 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/internlm-xcomposer/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/internlm-xcomposer/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/internlm2/README.md b/python/llm/example/CPU/PyTorch-Models/Model/internlm2/README.md index 7e55c5f3197..f8c1ff8c28a 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/internlm2/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/internlm2/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/llama2/README.md b/python/llm/example/CPU/PyTorch-Models/Model/llama2/README.md index a630cc0cc83..2227e0dcd2e 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/llama2/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/llama2/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/llava/README.md b/python/llm/example/CPU/PyTorch-Models/Model/llava/README.md index d9b2b8539e7..db7cec5b904 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/llava/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/llava/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/mamba/README.md b/python/llm/example/CPU/PyTorch-Models/Model/mamba/README.md index d649ffdb5be..5950791febe 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/mamba/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/mamba/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/meta-llama/README.md b/python/llm/example/CPU/PyTorch-Models/Model/meta-llama/README.md index e3c040fafc6..4c0ccb2037f 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/meta-llama/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/meta-llama/README.md @@ -10,7 +10,7 @@ In the example [example_chat_completion.py](./example_chat_completion.py), we sh ### 1. 
Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # Install meta-llama repository diff --git a/python/llm/example/CPU/PyTorch-Models/Model/mistral/README.md b/python/llm/example/CPU/PyTorch-Models/Model/mistral/README.md index 1f95826794b..8a4adbcdf53 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/mistral/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/mistral/README.md @@ -13,7 +13,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/mixtral/README.md b/python/llm/example/CPU/PyTorch-Models/Model/mixtral/README.md index 7baa9a4c828..bc8ee08e7f4 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/mixtral/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/mixtral/README.md @@ -13,7 +13,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install PyTorch CPU as default diff --git a/python/llm/example/CPU/PyTorch-Models/Model/openai-whisper/readme.md b/python/llm/example/CPU/PyTorch-Models/Model/openai-whisper/readme.md index 85f6594ab4f..a1def7118ef 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/openai-whisper/readme.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/openai-whisper/readme.md @@ -10,7 +10,7 @@ In the example [recognize.py](./recognize.py), we show a basic use case for a Wh ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/phi-1_5/README.md b/python/llm/example/CPU/PyTorch-Models/Model/phi-1_5/README.md index 236cee376df..3b4dfac1001 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/phi-1_5/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/phi-1_5/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/phi-2/README.md b/python/llm/example/CPU/PyTorch-Models/Model/phi-2/README.md index c9e8daaf049..81355b620e1 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/phi-2/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/phi-2/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/phixtral/README.md b/python/llm/example/CPU/PyTorch-Models/Model/phixtral/README.md index c3a190318fe..9f824fad5e7 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/phixtral/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/phixtral/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/qwen-vl/README.md b/python/llm/example/CPU/PyTorch-Models/Model/qwen-vl/README.md index 57ccdf71e87..0e2c21cf33d 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/qwen-vl/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/qwen-vl/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/qwen1.5/README.md b/python/llm/example/CPU/PyTorch-Models/Model/qwen1.5/README.md index a404cf03cd9..095ee0011de 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/qwen1.5/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/qwen1.5/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/skywork/README.md b/python/llm/example/CPU/PyTorch-Models/Model/skywork/README.md index 1221f2a3098..b1b2140737e 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/skywork/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/skywork/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/solar/README.md b/python/llm/example/CPU/PyTorch-Models/Model/solar/README.md index 0625fb2fdf7..44c2ae4b2c4 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/solar/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/solar/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/stablelm/README.md b/python/llm/example/CPU/PyTorch-Models/Model/stablelm/README.md index 6332f06383a..8934e3f8704 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/stablelm/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/stablelm/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/wizardcoder-python/README.md b/python/llm/example/CPU/PyTorch-Models/Model/wizardcoder-python/README.md index 7cfa8d11a9a..e4f99c474de 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/wizardcoder-python/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/wizardcoder-python/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/yi/README.md b/python/llm/example/CPU/PyTorch-Models/Model/yi/README.md index cb4d06a94f8..89adf93a4a2 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/yi/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/yi/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/yuan2/README.md b/python/llm/example/CPU/PyTorch-Models/Model/yuan2/README.md index c268f7a3e0a..3627e815703 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/yuan2/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/yuan2/README.md @@ -13,7 +13,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Model/ziya/README.md b/python/llm/example/CPU/PyTorch-Models/Model/ziya/README.md index 2a77221ad75..79ac293d777 100644 --- a/python/llm/example/CPU/PyTorch-Models/Model/ziya/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Model/ziya/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/More-Data-Types/README.md b/python/llm/example/CPU/PyTorch-Models/More-Data-Types/README.md index 461cb9834c0..4bbfb55e0e8 100644 --- a/python/llm/example/CPU/PyTorch-Models/More-Data-Types/README.md +++ b/python/llm/example/CPU/PyTorch-Models/More-Data-Types/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case of low-bit ### 1. 
Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/PyTorch-Models/Save-Load/README.md b/python/llm/example/CPU/PyTorch-Models/Save-Load/README.md index ae8c0302e3d..f3bbb5cf865 100644 --- a/python/llm/example/CPU/PyTorch-Models/Save-Load/README.md +++ b/python/llm/example/CPU/PyTorch-Models/Save-Load/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case of saving/ ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install ipex-llm with 'all' option diff --git a/python/llm/example/CPU/QLoRA-FineTuning/README.md b/python/llm/example/CPU/QLoRA-FineTuning/README.md index 33543c12ac0..88106180d1e 100644 --- a/python/llm/example/CPU/QLoRA-FineTuning/README.md +++ b/python/llm/example/CPU/QLoRA-FineTuning/README.md @@ -16,7 +16,7 @@ This example is ported from [bnb-4bit-training](https://colab.research.google.co ### 1. Install ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] pip install transformers==4.34.0 diff --git a/python/llm/example/CPU/QLoRA-FineTuning/alpaca-qlora/README.md b/python/llm/example/CPU/QLoRA-FineTuning/alpaca-qlora/README.md index edd4d08d5cd..9641d356f2e 100644 --- a/python/llm/example/CPU/QLoRA-FineTuning/alpaca-qlora/README.md +++ b/python/llm/example/CPU/QLoRA-FineTuning/alpaca-qlora/README.md @@ -5,7 +5,7 @@ This example ports [Alpaca-LoRA](https://github.com/tloen/alpaca-lora/tree/main) ### 1. 
Install ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] pip install datasets transformers==4.35.0 diff --git a/python/llm/example/CPU/Speculative-Decoding/baichuan2/README.md b/python/llm/example/CPU/Speculative-Decoding/baichuan2/README.md index 35c0fab60d9..91f2ca9d889 100644 --- a/python/llm/example/CPU/Speculative-Decoding/baichuan2/README.md +++ b/python/llm/example/CPU/Speculative-Decoding/baichuan2/README.md @@ -9,7 +9,7 @@ In the example [speculative.py](./speculative.py), we show a basic use case for ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] pip install intel_extension_for_pytorch==2.1.0 diff --git a/python/llm/example/CPU/Speculative-Decoding/chatglm3/README.md b/python/llm/example/CPU/Speculative-Decoding/chatglm3/README.md index 7d4a2e24595..333a62638f6 100644 --- a/python/llm/example/CPU/Speculative-Decoding/chatglm3/README.md +++ b/python/llm/example/CPU/Speculative-Decoding/chatglm3/README.md @@ -7,7 +7,7 @@ In the example [speculative.py](./speculative.py), we show a basic use case for ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] ``` diff --git a/python/llm/example/CPU/Speculative-Decoding/llama2/README.md b/python/llm/example/CPU/Speculative-Decoding/llama2/README.md index 4f76831d870..34646bcc3f9 100644 --- a/python/llm/example/CPU/Speculative-Decoding/llama2/README.md +++ b/python/llm/example/CPU/Speculative-Decoding/llama2/README.md @@ -9,7 +9,7 @@ In the example [speculative.py](./speculative.py), we show a basic use case for ### 1. 
Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] pip install intel_extension_for_pytorch==2.1.0 diff --git a/python/llm/example/CPU/Speculative-Decoding/mistral/README.md b/python/llm/example/CPU/Speculative-Decoding/mistral/README.md index 0f6c0762bbe..6f824d2b5a4 100644 --- a/python/llm/example/CPU/Speculative-Decoding/mistral/README.md +++ b/python/llm/example/CPU/Speculative-Decoding/mistral/README.md @@ -9,7 +9,7 @@ In the example [speculative.py](./speculative.py), we show a basic use case for ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] pip install intel_extension_for_pytorch==2.1.0 diff --git a/python/llm/example/CPU/Speculative-Decoding/qwen/README.md b/python/llm/example/CPU/Speculative-Decoding/qwen/README.md index e00d73f7c80..ec5866f0138 100644 --- a/python/llm/example/CPU/Speculative-Decoding/qwen/README.md +++ b/python/llm/example/CPU/Speculative-Decoding/qwen/README.md @@ -8,7 +8,7 @@ predict the next N tokens using `generate()` API, with IPEX-LLM speculative deco ### 1. 
Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] pip install tiktoken einops transformers_stream_generator # additional package required for Qwen to conduct generation diff --git a/python/llm/example/CPU/Speculative-Decoding/starcoder/README.md b/python/llm/example/CPU/Speculative-Decoding/starcoder/README.md index dcb42d991a6..eab5fd8a268 100644 --- a/python/llm/example/CPU/Speculative-Decoding/starcoder/README.md +++ b/python/llm/example/CPU/Speculative-Decoding/starcoder/README.md @@ -9,7 +9,7 @@ In the example [speculative.py](./speculative.py), we show a basic use case for ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] pip install intel_extension_for_pytorch==2.1.0 diff --git a/python/llm/example/CPU/Speculative-Decoding/vicuna/README.md b/python/llm/example/CPU/Speculative-Decoding/vicuna/README.md index faf31eb03c3..bd85910fe3c 100644 --- a/python/llm/example/CPU/Speculative-Decoding/vicuna/README.md +++ b/python/llm/example/CPU/Speculative-Decoding/vicuna/README.md @@ -9,7 +9,7 @@ In the example [speculative.py](./speculative.py), we show a basic use case for ### 1. 
Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] pip install intel_extension_for_pytorch==2.1.0 diff --git a/python/llm/example/CPU/Speculative-Decoding/ziya/README.md b/python/llm/example/CPU/Speculative-Decoding/ziya/README.md index 769b5519021..837aa357992 100644 --- a/python/llm/example/CPU/Speculative-Decoding/ziya/README.md +++ b/python/llm/example/CPU/Speculative-Decoding/ziya/README.md @@ -9,7 +9,7 @@ In the example [speculative.py](./speculative.py), we show a basic use case for ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] pip install intel_extension_for_pytorch==2.1.0 diff --git a/python/llm/example/CPU/vLLM-Serving/README.md b/python/llm/example/CPU/vLLM-Serving/README.md index c4e4c2bcad7..b7933112fd0 100644 --- a/python/llm/example/CPU/vLLM-Serving/README.md +++ b/python/llm/example/CPU/vLLM-Serving/README.md @@ -14,7 +14,7 @@ To run vLLM continuous batching on Intel CPUs, install the dependencies as follo ```bash # First create an conda environment -conda create -n ipex-vllm python==3.9 +conda create -n ipex-vllm python=3.11 conda activate ipex-vllm # Install dependencies pip3 install numpy diff --git a/python/llm/example/GPU/Applications/autogen/README.md b/python/llm/example/GPU/Applications/autogen/README.md index 2a9f83288e8..9ae4104c3e3 100644 --- a/python/llm/example/GPU/Applications/autogen/README.md +++ b/python/llm/example/GPU/Applications/autogen/README.md @@ -11,7 +11,7 @@ mkdir autogen cd autogen # create respective conda environment -conda create -n autogen python=3.9 +conda create -n autogen python=3.11 conda activate autogen # install xpu-supported and fastchat-adapted ipex-llm diff --git a/python/llm/example/GPU/Applications/streaming-llm/README.md 
b/python/llm/example/GPU/Applications/streaming-llm/README.md index ae0e1aa7ca1..4e1fd1ad161 100644 --- a/python/llm/example/GPU/Applications/streaming-llm/README.md +++ b/python/llm/example/GPU/Applications/streaming-llm/README.md @@ -10,7 +10,7 @@ model = AutoModelForCausalLM.from_pretrained(model_name_or_path, load_in_4bit=Tr ## Prepare Environment We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install -U transformers==4.34.0 pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/Deepspeed-AutoTP/README.md b/python/llm/example/GPU/Deepspeed-AutoTP/README.md index 948bf8c540a..aa408d4e431 100644 --- a/python/llm/example/GPU/Deepspeed-AutoTP/README.md +++ b/python/llm/example/GPU/Deepspeed-AutoTP/README.md @@ -10,7 +10,7 @@ To run this example with IPEX-LLM on Intel GPUs, we have some recommended requir ### 1. 
Install ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Advanced-Quantizations/AWQ/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Advanced-Quantizations/AWQ/README.md index 59355f71214..cf281a8fdd1 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Advanced-Quantizations/AWQ/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Advanced-Quantizations/AWQ/README.md @@ -33,7 +33,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a AWQ We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Advanced-Quantizations/GGUF-IQ2/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Advanced-Quantizations/GGUF-IQ2/README.md index c90522f0312..27ace7876c6 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Advanced-Quantizations/GGUF-IQ2/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Advanced-Quantizations/GGUF-IQ2/README.md @@ -23,7 +23,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a GGUF We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url 
https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Advanced-Quantizations/GGUF/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Advanced-Quantizations/GGUF/README.md index c0101fde5c0..a979d5f6051 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Advanced-Quantizations/GGUF/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Advanced-Quantizations/GGUF/README.md @@ -23,7 +23,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Advanced-Quantizations/GPTQ/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Advanced-Quantizations/GPTQ/README.md index d95075328bb..742ba6ec8bc 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Advanced-Quantizations/GPTQ/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Advanced-Quantizations/GPTQ/README.md @@ -9,7 +9,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Llam ### 1. 
Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/aquila/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/aquila/README.md index 10c44883532..f2b57eb497a 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/aquila/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/aquila/README.md @@ -16,7 +16,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Aqui #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -24,7 +24,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/aquila2/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/aquila2/README.md index 689d3821a7a..b68ff6dfaad 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/aquila2/README.md +++ 
b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/aquila2/README.md @@ -16,7 +16,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Aqui #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -25,7 +25,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/baichuan/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/baichuan/README.md index dbebb1d4169..105e1c0b94d 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/baichuan/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/baichuan/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Baic #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -20,7 +20,7 @@ pip install transformers_stream_generator # additional package required for Bai #### 1.2 Installation on Windows 
We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/baichuan2/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/baichuan2/README.md index 502ae4acfad..d7de8ab0208 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/baichuan2/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/baichuan2/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Baic #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -20,7 +20,7 @@ pip install transformers_stream_generator # additional package required for Bai #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/bluelm/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/bluelm/README.md index a075bbf2479..af784432e95 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/bluelm/README.md +++ 
b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/bluelm/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Blue #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -19,7 +19,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/chatglm2/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/chatglm2/README.md index 9a6af846a6d..9f7fbcb7f1e 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/chatglm2/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/chatglm2/README.md @@ -12,7 +12,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Chat #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -20,7 +20,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows 
We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -137,7 +137,7 @@ In the example [streamchat.py](./streamchat.py), we show a basic use case for a #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -145,7 +145,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/chatglm3/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/chatglm3/README.md index 8087252e92f..607a7a33204 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/chatglm3/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/chatglm3/README.md @@ -11,7 +11,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Chat #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install 
intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -20,7 +20,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -138,7 +138,7 @@ In the example [streamchat.py](./streamchat.py), we show a basic use case for a #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -147,7 +147,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/chinese-llama2/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/chinese-llama2/README.md index 68bf861fa7b..08c49e99b87 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/chinese-llama2/README.md +++ 
b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/chinese-llama2/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Llam #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -19,7 +19,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/codellama/readme.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/codellama/readme.md index c19a9c7151e..f5f31406390 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/codellama/readme.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/codellama/readme.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for an Cod #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -20,7 +20,7 @@ pip install transformers==4.34.1 # CodeLlamaTokenizer is supported in higher ver #### 1.2 Installation 
on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/deciLM-7b/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/deciLM-7b/README.md index e3da7af0fa9..dd69d0094b4 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/deciLM-7b/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/deciLM-7b/README.md @@ -13,7 +13,7 @@ We suggest using conda to manage environment. For more information about conda i After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.0.110+xpu as default # you can install specific ipex/torch version for your need @@ -23,7 +23,7 @@ pip install transformers==4.35.2 # required by DeciLM-7B #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/deepseek/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/deepseek/README.md index 45ba08494c5..d747ba533f8 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/deepseek/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/deepseek/README.md @@ -10,7 +10,7 @@ In the example 
[generate.py](./generate.py), we show a basic use case for a Deep #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.0.110+xpu as default # you can install specific ipex/torch version for your need @@ -20,7 +20,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/distil-whisper/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/distil-whisper/README.md index a3bef0329ec..664e67aa84d 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/distil-whisper/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/distil-whisper/README.md @@ -13,7 +13,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -23,7 +23,7 @@ pip install datasets soundfile librosa # required by audio processing #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/dolly-v1/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/dolly-v1/README.md index ebcf31b2184..027ff4e8fe9 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/dolly-v1/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/dolly-v1/README.md @@ -12,7 +12,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Doll #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -21,7 +21,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will 
install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/dolly-v2/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/dolly-v2/README.md index 7a73f8c27e4..5ab0cf0e3dc 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/dolly-v2/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/dolly-v2/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Doll #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -19,7 +19,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/falcon/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/falcon/README.md index 8d415381f0b..c5e96f1c816 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/falcon/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/falcon/README.md @@ -11,7 +11,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Falc #### 1.1 Installation on Linux We 
suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -21,7 +21,7 @@ pip install einops # additional package required for falcon-7b-instruct to condu #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/flan-t5/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/flan-t5/README.md index 51d750b34c5..f73665f62f4 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/flan-t5/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/flan-t5/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -22,7 +22,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/gemma/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/gemma/README.md index 99db85114a5..98b775f121e 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/gemma/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/gemma/README.md @@ -14,7 +14,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -27,7 +27,7 @@ pip install transformers==4.38.1 #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/gpt-j/readme.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/gpt-j/readme.md index dcf795867ac..c8659217c36 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/gpt-j/readme.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/gpt-j/readme.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a GPT- #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -19,7 +19,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] 
--extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/internlm/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/internlm/README.md index 0b35a40ed2c..c784dedb0be 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/internlm/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/internlm/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Inte #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -19,7 +19,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/internlm2/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/internlm2/README.md index d58d103ea88..a6e32dd83b9 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/internlm2/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/internlm2/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Inte #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda 
create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -19,7 +19,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/llama2/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/llama2/README.md index dbeb9520b85..97b6deee6ff 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/llama2/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/llama2/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Llam #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -19,7 +19,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url 
https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/mistral/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/mistral/README.md index 4dd1bac0d87..784134197fb 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/mistral/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/mistral/README.md @@ -14,7 +14,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -27,7 +27,7 @@ pip install transformers==4.34.0 #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/mixtral/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/mixtral/README.md index d87c8babd12..47c9e7282ad 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/mixtral/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/mixtral/README.md @@ -14,7 +14,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -27,7 +27,7 @@ pip install transformers==4.36.0 #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/mpt/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/mpt/README.md index e9bea4906f6..99092cf9bf8 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/mpt/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/mpt/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for an MPT #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -20,7 +20,7 @@ pip install einops # additional package required for mpt-7b-chat and mpt-30b-ch #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url 
https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/phi-1_5/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/phi-1_5/README.md index 198e73ba26f..98868833682 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/phi-1_5/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/phi-1_5/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a phi- #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -20,7 +20,7 @@ pip install einops # additional package required for phi-1_5 to conduct generati #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/phi-2/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/phi-2/README.md index f7030b261d4..353d6e516cc 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/phi-2/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/phi-2/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a phi- #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate 
llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -19,7 +19,7 @@ pip install einops # additional package required for phi-2 to conduct generation #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/phixtral/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/phixtral/README.md index 7a05488d2cc..e91daf299c4 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/phixtral/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/phixtral/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Inte #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -19,7 +19,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url 
https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/qwen-vl/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/qwen-vl/README.md index fe044d10906..f8d67544144 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/qwen-vl/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/qwen-vl/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -22,7 +22,7 @@ pip install accelerate tiktoken einops transformers_stream_generator==0.0.4 scip #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/qwen/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/qwen/README.md index 7b20fcf1767..b475d831bfe 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/qwen/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/qwen/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Qwen #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 
+conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -20,7 +20,7 @@ pip install tiktoken einops transformers_stream_generator # additional package #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/qwen1.5/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/qwen1.5/README.md index 656e893358e..830d4d267fc 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/qwen1.5/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/qwen1.5/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Qwen #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -20,7 +20,7 @@ pip install transformers==4.37.0 # install transformers which supports Qwen2 #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url 
https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/redpajama/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/redpajama/README.md index ddb34896934..201046af1b0 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/redpajama/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/redpajama/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -21,7 +21,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/replit/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/replit/README.md index a4626d9938d..9e6930a535f 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/replit/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/replit/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -22,7 +22,7 @@ pip install "transformers<4.35" #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/rwkv4/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/rwkv4/README.md index b2a1ccf6a27..5ec3e3f0a3e 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/rwkv4/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/rwkv4/README.md @@ -12,7 +12,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a RWKV #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -20,7 +20,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as 
default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/rwkv5/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/rwkv5/README.md index b0d783fd39c..c924fc25843 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/rwkv5/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/rwkv5/README.md @@ -12,7 +12,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a RWKV #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -20,7 +20,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/solar/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/solar/README.md index 343582176f2..72a3562d6fa 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/solar/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/solar/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a SOLA #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm 
python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -20,7 +20,7 @@ pip install transformers==4.35.2 # required by SOLAR #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/stablelm/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/stablelm/README.md index b58df91cb83..ce694c4968a 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/stablelm/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/stablelm/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -25,7 +25,7 @@ pip install transformers==4.38.0 #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/starcoder/readme.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/starcoder/readme.md index 41ddf26c48c..d0c6a257b0f 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/starcoder/readme.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/starcoder/readme.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for an Sta #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -19,7 +19,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade 
ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/vicuna/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/vicuna/README.md index f53ecb71b50..9b71962514f 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/vicuna/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/vicuna/README.md @@ -12,7 +12,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Vicu #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -21,7 +21,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/voiceassistant/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/voiceassistant/README.md index 07d0d4af8c5..f34731df14b 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/voiceassistant/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/voiceassistant/README.md @@ -12,7 +12,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Whis #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n 
llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -26,7 +26,7 @@ pip install PyAudio inquirer sounddevice #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/whisper/readme.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/whisper/readme.md index dd6841143d0..377b8592688 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/whisper/readme.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/whisper/readme.md @@ -11,7 +11,7 @@ In the example [recognize.py](./recognize.py), we show a basic use case for a Wh #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -21,7 +21,7 @@ pip install datasets soundfile librosa # required by audio processing #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url 
https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/yi/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/yi/README.md index 6995e24b4a6..cb020717e6d 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/yi/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/yi/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -23,7 +23,7 @@ pip install einops # additional package required for Yi-6B to conduct generation #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/yuan2/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/yuan2/README.md index b0a664136a7..d67ac916351 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Model/yuan2/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Model/yuan2/README.md @@ -12,7 +12,7 @@ In the example [generate.py](./generate.py), we show a basic use case for an Yua #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install 
--pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option @@ -22,7 +22,7 @@ pip install pandas # additional package required for Yuan2 to conduct generation #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/More-Data-Types/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/More-Data-Types/README.md index 2a8a766186f..d97d0e40361 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/More-Data-Types/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/More-Data-Types/README.md @@ -5,7 +5,7 @@ In this example, we show a pipeline to apply IPEX-LLM low-bit optimizations (inc ## Prepare Environment We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default diff --git a/python/llm/example/GPU/HF-Transformers-AutoModels/Save-Load/README.md b/python/llm/example/GPU/HF-Transformers-AutoModels/Save-Load/README.md index 53c38b13332..f9849ff8248 100644 --- a/python/llm/example/GPU/HF-Transformers-AutoModels/Save-Load/README.md +++ b/python/llm/example/GPU/HF-Transformers-AutoModels/Save-Load/README.md @@ -11,7 +11,7 @@ In the example [generate.py](./generate.py), we show a basic use case of saving/ #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre 
--upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -20,7 +20,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/LLM-Finetuning/DPO/README.md b/python/llm/example/GPU/LLM-Finetuning/DPO/README.md index eeed9519e6e..076e564206b 100644 --- a/python/llm/example/GPU/LLM-Finetuning/DPO/README.md +++ b/python/llm/example/GPU/LLM-Finetuning/DPO/README.md @@ -13,7 +13,7 @@ This example is ported from [Fine_tune_a_Mistral_7b_model_with_DPO](https://gith ### 1. Install ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/LLM-Finetuning/HF-PEFT/README.md b/python/llm/example/GPU/LLM-Finetuning/HF-PEFT/README.md index b847fdce358..7da65981cbf 100644 --- a/python/llm/example/GPU/LLM-Finetuning/HF-PEFT/README.md +++ b/python/llm/example/GPU/LLM-Finetuning/HF-PEFT/README.md @@ -10,7 +10,7 @@ To run this example with IPEX-LLM on Intel GPUs, we have some recommended requir ### 1. 
Install ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/LLM-Finetuning/LoRA/README.md b/python/llm/example/GPU/LLM-Finetuning/LoRA/README.md index 4af01ab0191..8ef75a28949 100644 --- a/python/llm/example/GPU/LLM-Finetuning/LoRA/README.md +++ b/python/llm/example/GPU/LLM-Finetuning/LoRA/README.md @@ -8,7 +8,7 @@ To run this example with IPEX-LLM on Intel GPUs, we have some recommended requir ### 1. Install ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/LLM-Finetuning/QA-LoRA/README.md b/python/llm/example/GPU/LLM-Finetuning/QA-LoRA/README.md index 5ab124f07b9..006f6630872 100644 --- a/python/llm/example/GPU/LLM-Finetuning/QA-LoRA/README.md +++ b/python/llm/example/GPU/LLM-Finetuning/QA-LoRA/README.md @@ -8,7 +8,7 @@ To run this example with IPEX-LLM on Intel GPUs, we have some recommended requir ### 1. 
Install ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/LLM-Finetuning/QLoRA/alpaca-qlora/README.md b/python/llm/example/GPU/LLM-Finetuning/QLoRA/alpaca-qlora/README.md index 9893c76316a..4cdb3d26241 100644 --- a/python/llm/example/GPU/LLM-Finetuning/QLoRA/alpaca-qlora/README.md +++ b/python/llm/example/GPU/LLM-Finetuning/QLoRA/alpaca-qlora/README.md @@ -10,7 +10,7 @@ To run this example with IPEX-LLM on Intel GPUs, we have some recommended requir ### 1. Install ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/LLM-Finetuning/QLoRA/simple-example/README.md b/python/llm/example/GPU/LLM-Finetuning/QLoRA/simple-example/README.md index 15b63674ff1..fe682829c7c 100644 --- a/python/llm/example/GPU/LLM-Finetuning/QLoRA/simple-example/README.md +++ b/python/llm/example/GPU/LLM-Finetuning/QLoRA/simple-example/README.md @@ -13,7 +13,7 @@ This example is referred to [bnb-4bit-training](https://colab.research.google.co ### 1. 
Install ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/LLM-Finetuning/QLoRA/trl-example/README.md b/python/llm/example/GPU/LLM-Finetuning/QLoRA/trl-example/README.md index 0ba053f89d8..46e8992b902 100644 --- a/python/llm/example/GPU/LLM-Finetuning/QLoRA/trl-example/README.md +++ b/python/llm/example/GPU/LLM-Finetuning/QLoRA/trl-example/README.md @@ -13,7 +13,7 @@ This example utilizes a subset of [yahma/alpaca-cleaned](https://huggingface.co/ ### 1. Install ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/LLM-Finetuning/ReLora/README.md b/python/llm/example/GPU/LLM-Finetuning/ReLora/README.md index 3218948be4d..0e94a63a1fb 100644 --- a/python/llm/example/GPU/LLM-Finetuning/ReLora/README.md +++ b/python/llm/example/GPU/LLM-Finetuning/ReLora/README.md @@ -8,7 +8,7 @@ To run this example with IPEX-LLM on Intel GPUs, we have some recommended requir ### 1. 
Install ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/Long-Context/LLaMA2-32K/README.md b/python/llm/example/GPU/Long-Context/LLaMA2-32K/README.md index 05c0661b9aa..677b4742c2f 100644 --- a/python/llm/example/GPU/Long-Context/LLaMA2-32K/README.md +++ b/python/llm/example/GPU/Long-Context/LLaMA2-32K/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Llam #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -19,7 +19,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/ModelScope-Models/README.md b/python/llm/example/GPU/ModelScope-Models/README.md index 331638a36e1..fe3227c2d91 100644 --- a/python/llm/example/GPU/ModelScope-Models/README.md +++ b/python/llm/example/GPU/ModelScope-Models/README.md @@ -11,7 +11,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Chat #### 1.1 Installation on Linux We suggest using conda to manage environment: 
```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -22,7 +22,7 @@ pip install modelscope==1.11.0 #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/ModelScope-Models/Save-Load/README.md b/python/llm/example/GPU/ModelScope-Models/Save-Load/README.md index 2dfcc23830a..33b1b900497 100644 --- a/python/llm/example/GPU/ModelScope-Models/Save-Load/README.md +++ b/python/llm/example/GPU/ModelScope-Models/Save-Load/README.md @@ -11,7 +11,7 @@ In the example [generate.py](./generate.py), we show a basic use case of saving/ #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -21,7 +21,7 @@ pip install modelscope==1.11.0 #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git 
a/python/llm/example/GPU/Pipeline-Parallel-Inference/README.md b/python/llm/example/GPU/Pipeline-Parallel-Inference/README.md index 7162b757590..58379184298 100644 --- a/python/llm/example/GPU/Pipeline-Parallel-Inference/README.md +++ b/python/llm/example/GPU/Pipeline-Parallel-Inference/README.md @@ -10,7 +10,7 @@ To run this example with IPEX-LLM on Intel GPUs, we have some recommended requir ### 1.1 Install IPEX-LLM ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default # you can install specific ipex/torch version for your need diff --git a/python/llm/example/GPU/PyTorch-Models/Model/aquila2/README.md b/python/llm/example/GPU/PyTorch-Models/Model/aquila2/README.md index a9597f975fa..32da14eafb4 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/aquila2/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/aquila2/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -22,7 +22,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/baichuan/README.md b/python/llm/example/GPU/PyTorch-Models/Model/baichuan/README.md index ce470ec9a45..be7501ec8aa 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/baichuan/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/baichuan/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -23,7 +23,7 @@ pip install transformers_stream_generator # additional package required for Bai #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/baichuan2/README.md b/python/llm/example/GPU/PyTorch-Models/Model/baichuan2/README.md index fdf78524270..11e5dad84a6 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/baichuan2/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/baichuan2/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -23,7 +23,7 @@ pip install transformers_stream_generator # additional package required for Bai #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/bark/README.md b/python/llm/example/GPU/PyTorch-Models/Model/bark/README.md index 07d9411ab02..05f34949ac9 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/bark/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/bark/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -23,7 +23,7 @@ pip install scipy #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default diff --git a/python/llm/example/GPU/PyTorch-Models/Model/bluelm/README.md b/python/llm/example/GPU/PyTorch-Models/Model/bluelm/README.md index 8eac3142646..fc6f47fbf44 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/bluelm/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/bluelm/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -22,7 +22,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/chatglm2/README.md b/python/llm/example/GPU/PyTorch-Models/Model/chatglm2/README.md index 72c0e775981..afda5bb6099 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/chatglm2/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/chatglm2/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -22,7 +22,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -136,7 +136,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -146,7 +146,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/chatglm3/README.md b/python/llm/example/GPU/PyTorch-Models/Model/chatglm3/README.md index df8ed461cfc..278888b993c 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/chatglm3/README.md +++ 
b/python/llm/example/GPU/PyTorch-Models/Model/chatglm3/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -22,7 +22,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -135,7 +135,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -145,7 +145,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/codellama/README.md b/python/llm/example/GPU/PyTorch-Models/Model/codellama/README.md index 0c9ac640e31..01115cefa4b 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/codellama/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/codellama/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -23,7 +23,7 @@ pip install transformers==4.34.1 # CodeLlamaTokenizer is supported in higher ver #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/deciLM-7b/README.md b/python/llm/example/GPU/PyTorch-Models/Model/deciLM-7b/README.md index 01206c19eea..644c0205112 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/deciLM-7b/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/deciLM-7b/README.md @@ -13,7 +13,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.0.110+xpu as default @@ -25,7 +25,7 @@ pip install transformers==4.35.2 # required by DeciLM-7B #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/deepseek/README.md b/python/llm/example/GPU/PyTorch-Models/Model/deepseek/README.md index 55d5eaab1a4..d3c76f9f038 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/deepseek/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/deepseek/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.0.110+xpu as default @@ -23,7 +23,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/distil-whisper/README.md b/python/llm/example/GPU/PyTorch-Models/Model/distil-whisper/README.md index 9de7587bd96..d72abcf303c 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/distil-whisper/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/distil-whisper/README.md @@ -13,7 +13,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -23,7 +23,7 @@ pip install datasets soundfile librosa # required by audio processing #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/dolly-v1/README.md b/python/llm/example/GPU/PyTorch-Models/Model/dolly-v1/README.md index 6a67390c79a..4f80a814ce5 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/dolly-v1/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/dolly-v1/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -22,7 +22,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/dolly-v2/README.md b/python/llm/example/GPU/PyTorch-Models/Model/dolly-v2/README.md index 24871ddb603..28dab67b077 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/dolly-v2/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/dolly-v2/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -22,7 +22,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/flan-t5/README.md b/python/llm/example/GPU/PyTorch-Models/Model/flan-t5/README.md index 84714a32ec6..d42a7cb2787 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/flan-t5/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/flan-t5/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -22,7 +22,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/internlm2/README.md b/python/llm/example/GPU/PyTorch-Models/Model/internlm2/README.md index d58d103ea88..a6e32dd83b9 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/internlm2/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/internlm2/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Inte #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -19,7 +19,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade 
ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/llama2/README.md b/python/llm/example/GPU/PyTorch-Models/Model/llama2/README.md index ab29daa6f7a..b801c7fb41d 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/llama2/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/llama2/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -22,7 +22,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/llava/README.md b/python/llm/example/GPU/PyTorch-Models/Model/llava/README.md index aff37cd105d..4eefd7145b0 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/llava/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/llava/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -27,7 +27,7 @@ cd LLaVA # change the working directory to the LLaVA folder #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/mamba/README.md b/python/llm/example/GPU/PyTorch-Models/Model/mamba/README.md index 085e440d15d..7c30497a4dc 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/mamba/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/mamba/README.md @@ -11,7 +11,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.0.110+xpu as default diff --git a/python/llm/example/GPU/PyTorch-Models/Model/mistral/README.md b/python/llm/example/GPU/PyTorch-Models/Model/mistral/README.md index 8fdaa738fc7..565470e5a1c 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/mistral/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/mistral/README.md @@ -14,7 +14,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -27,7 +27,7 @@ pip install transformers==4.34.0 #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/mixtral/README.md b/python/llm/example/GPU/PyTorch-Models/Model/mixtral/README.md index d617ed4ea49..8f4a4dab4af 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/mixtral/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/mixtral/README.md @@ -14,7 +14,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -27,7 +27,7 @@ pip install transformers==4.36.0 #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/phi-1_5/README.md b/python/llm/example/GPU/PyTorch-Models/Model/phi-1_5/README.md index 3a45012b5a3..54a72a07aed 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/phi-1_5/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/phi-1_5/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -22,7 +22,7 @@ pip install einops # additional package required for phi-1_5 to conduct generati #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/phi-2/README.md b/python/llm/example/GPU/PyTorch-Models/Model/phi-2/README.md index 0f6c8bbf7d7..4a201625544 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/phi-2/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/phi-2/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -21,7 +21,7 @@ pip install einops # additional package required for phi-2 to conduct generation #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/phixtral/README.md b/python/llm/example/GPU/PyTorch-Models/Model/phixtral/README.md index 61743b12690..9f1a33be768 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/phixtral/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/phixtral/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -22,7 +22,7 @@ pip install einops # additional package required for phixtral to conduct generat #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/qwen-vl/README.md b/python/llm/example/GPU/PyTorch-Models/Model/qwen-vl/README.md index 80a65a59328..473cac40b7c 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/qwen-vl/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/qwen-vl/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -22,7 +22,7 @@ pip install accelerate tiktoken einops transformers_stream_generator==0.0.4 scip #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/qwen1.5/README.md b/python/llm/example/GPU/PyTorch-Models/Model/qwen1.5/README.md index daed43902d8..86b0f8c78cf 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/qwen1.5/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/qwen1.5/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case for a Qwen #### 1.1 Installation on Linux We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ @@ -20,7 +20,7 @@ pip install transformers==4.37.0 # install transformers which supports Qwen2 #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # 
below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/replit/README.md b/python/llm/example/GPU/PyTorch-Models/Model/replit/README.md index f9c19c19e9e..8ad736332bb 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/replit/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/replit/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -23,7 +23,7 @@ pip install "transformers<4.35" #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/solar/README.md b/python/llm/example/GPU/PyTorch-Models/Model/solar/README.md index 6eb6f05263b..e0802db7a73 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/solar/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/solar/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -23,7 +23,7 @@ pip install transformers==4.35.2 # required by SOLAR #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/speech-t5/README.md b/python/llm/example/GPU/PyTorch-Models/Model/speech-t5/README.md index 239877c6acc..a0a1020cde7 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/speech-t5/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/speech-t5/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -23,7 +23,7 @@ pip install "datasets<2.18" soundfile # additional package required for SpeechT5 #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default diff --git a/python/llm/example/GPU/PyTorch-Models/Model/stablelm/README.md b/python/llm/example/GPU/PyTorch-Models/Model/stablelm/README.md index 656195b198d..f322d64f191 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/stablelm/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/stablelm/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -25,7 +25,7 @@ pip install transformers==4.38.0 #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/starcoder/README.md b/python/llm/example/GPU/PyTorch-Models/Model/starcoder/README.md index ae0eee668b4..9580c1a8325 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/starcoder/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/starcoder/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -22,7 +22,7 @@ pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-exte #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/yi/README.md b/python/llm/example/GPU/PyTorch-Models/Model/yi/README.md index 4562972e75d..bac21baf20e 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/yi/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/yi/README.md @@ -12,7 +12,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 # recommend to use Python 3.9 +conda create -n llm python=3.11 # recommend to use Python 3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default @@ -23,7 +23,7 @@ pip install einops # additional package required for Yi-6B to conduct generation #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Model/yuan2/README.md b/python/llm/example/GPU/PyTorch-Models/Model/yuan2/README.md index c5364a4256b..2def531d573 100644 --- a/python/llm/example/GPU/PyTorch-Models/Model/yuan2/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Model/yuan2/README.md @@ -14,7 +14,7 @@ We suggest using conda to manage the Python environment. 
For more information ab After installing conda, create a Python environment for IPEX-LLM: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm pip install --pre --upgrade ipex-llm[all] # install the latest ipex-llm nightly build with 'all' option @@ -24,7 +24,7 @@ pip install pandas # additional package required for Yuan2 to conduct generation #### 1.2 Installation on Windows We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 libuv +conda create -n llm python=3.11 libuv conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/More-Data-Types/README.md b/python/llm/example/GPU/PyTorch-Models/More-Data-Types/README.md index e3b223dffb3..4a739e55131 100644 --- a/python/llm/example/GPU/PyTorch-Models/More-Data-Types/README.md +++ b/python/llm/example/GPU/PyTorch-Models/More-Data-Types/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case of low-bit ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/PyTorch-Models/Save-Load/README.md b/python/llm/example/GPU/PyTorch-Models/Save-Load/README.md index 939625163eb..0efc1af2943 100644 --- a/python/llm/example/GPU/PyTorch-Models/Save-Load/README.md +++ b/python/llm/example/GPU/PyTorch-Models/Save-Load/README.md @@ -10,7 +10,7 @@ In the example [generate.py](./generate.py), we show a basic use case of saving/ ### 1. 
Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/Speculative-Decoding/baichuan2/README.md b/python/llm/example/GPU/Speculative-Decoding/baichuan2/README.md index 8f82d35fbbc..2f9fd573e73 100644 --- a/python/llm/example/GPU/Speculative-Decoding/baichuan2/README.md +++ b/python/llm/example/GPU/Speculative-Decoding/baichuan2/README.md @@ -9,7 +9,7 @@ In the example [speculative.py](./speculative.py), we show a basic use case for ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/Speculative-Decoding/chatglm3/README.md b/python/llm/example/GPU/Speculative-Decoding/chatglm3/README.md index eec1f6ed682..8766bf3dd35 100644 --- a/python/llm/example/GPU/Speculative-Decoding/chatglm3/README.md +++ b/python/llm/example/GPU/Speculative-Decoding/chatglm3/README.md @@ -9,7 +9,7 @@ In the example [speculative.py](./speculative.py), we show a basic use case for ### 1. 
Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/Speculative-Decoding/gpt-j/README.md b/python/llm/example/GPU/Speculative-Decoding/gpt-j/README.md index 9ec03e5e323..9f82533a96c 100644 --- a/python/llm/example/GPU/Speculative-Decoding/gpt-j/README.md +++ b/python/llm/example/GPU/Speculative-Decoding/gpt-j/README.md @@ -9,7 +9,7 @@ In the example [speculative.py](./speculative.py), we show a basic use case for ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/Speculative-Decoding/llama2/README.md b/python/llm/example/GPU/Speculative-Decoding/llama2/README.md index a8648c1d565..d25f77c6ce9 100644 --- a/python/llm/example/GPU/Speculative-Decoding/llama2/README.md +++ b/python/llm/example/GPU/Speculative-Decoding/llama2/README.md @@ -9,7 +9,7 @@ In the example [speculative.py](./speculative.py), we show a basic use case for ### 1. 
Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/Speculative-Decoding/mistral/README.md b/python/llm/example/GPU/Speculative-Decoding/mistral/README.md index eebad70aea5..12fbeb41651 100644 --- a/python/llm/example/GPU/Speculative-Decoding/mistral/README.md +++ b/python/llm/example/GPU/Speculative-Decoding/mistral/README.md @@ -9,7 +9,7 @@ In the example [speculative.py](./speculative.py), we show a basic use case for ### 1. Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/Speculative-Decoding/qwen/README.md b/python/llm/example/GPU/Speculative-Decoding/qwen/README.md index 40607d1f4d6..515aaf7b5f3 100644 --- a/python/llm/example/GPU/Speculative-Decoding/qwen/README.md +++ b/python/llm/example/GPU/Speculative-Decoding/qwen/README.md @@ -9,7 +9,7 @@ In the example [speculative.py](./speculative.py), we show a basic use case for ### 1. 
Install We suggest using conda to manage environment: ```bash -conda create -n llm python=3.9 +conda create -n llm python=3.11 conda activate llm # below command will install intel_extension_for_pytorch==2.1.10+xpu as default pip install --pre --upgrade ipex-llm[xpu] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ diff --git a/python/llm/example/GPU/vLLM-Serving/README.md b/python/llm/example/GPU/vLLM-Serving/README.md index 02f723794b5..92079c89fc4 100644 --- a/python/llm/example/GPU/vLLM-Serving/README.md +++ b/python/llm/example/GPU/vLLM-Serving/README.md @@ -31,7 +31,7 @@ To run vLLM continuous batching on Intel GPUs, install the dependencies as follo ```bash # First create an conda environment -conda create -n ipex-vllm python==3.9 +conda create -n ipex-vllm python=3.11 conda activate ipex-vllm # Install dependencies pip3 install psutil diff --git a/python/llm/scripts/env-check.sh b/python/llm/scripts/env-check.sh index 7169858e119..fef3383758f 100644 --- a/python/llm/scripts/env-check.sh +++ b/python/llm/scripts/env-check.sh @@ -20,7 +20,7 @@ check_python() retval="0" fi else - echo "No Python found! Please use `conda create -n llm python=3.9` to create environment. More details could be found in the README.md" + echo "No Python found! Please use \`conda create -n llm python=3.11\` to create environment. More details could be found in the README.md" retval="1" fi return "$retval"