diff --git a/.github/workflows/manually_build.yml b/.github/workflows/manually_build.yml
index 24757b19ef6..fabf4dbb0f0 100644
--- a/.github/workflows/manually_build.yml
+++ b/.github/workflows/manually_build.yml
@@ -36,6 +36,7 @@ on:
         required: true
         default: '2.1.0-SNAPSHOT'
         type: string
+  push:
 
 env:
   TAG: ${{ inputs.tag }}
@@ -225,34 +226,38 @@ jobs:
           sudo docker rmi -f ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG} ${image}:latest
 
   ipex-llm-cpu:
-    if: ${{ inputs.artifact == 'ipex-llm-cpu' || inputs.artifact == 'all' }}
-    runs-on: [self-hosted, Shire]
+    # if: ${{ inputs.artifact == 'ipex-llm-cpu' || inputs.artifact == 'all' }}
+    runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # actions/checkout@v3
-      - name: docker login
-        run: |
-          docker login -u ${DOCKERHUB_USERNAME} -p ${DOCKERHUB_PASSWORD}
+      # - name: docker login
+      #   run: |
+      #     docker login -u ${DOCKERHUB_USERNAME} -p ${DOCKERHUB_PASSWORD}
+      - name: Log in to Docker Hub
+        run: echo ${{ secrets.DOCKERHUB_PASSWORD }} | docker login -u intelanalytics --password-stdin
 
       - name: ipex-llm-cpu
         run: |
           echo "##############################################################"
           echo "####### ipex-llm-cpu ########"
           echo "##############################################################"
+          # docker pull intelanalytics/ipex-llm-cpu:test
+          # docker push intelanalytics/ipex-llm-cpu:test
           export image=intelanalytics/ipex-llm-cpu
+          export TAG=test
           cd docker/llm/inference/cpu/docker
-          sudo docker build \
+          docker build \
             --no-cache=true \
-            --build-arg http_proxy=${HTTP_PROXY} \
-            --build-arg https_proxy=${HTTPS_PROXY} \
-            --build-arg no_proxy=${NO_PROXY} \
             -t ${image}:${TAG} -f ./Dockerfile .
-          sudo docker push ${image}:${TAG}
-          sudo docker tag ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
-          sudo docker push 10.239.45.10/arda/${image}:${TAG}
+          docker login
+          docker images
+          docker push ${image}:${TAG}
+          docker tag ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG}
+          # docker push 10.239.45.10/arda/${image}:${TAG}
           # tag 'latest'
-          sudo docker tag ${image}:${TAG} ${image}:latest
-          sudo docker push ${image}:latest
-          sudo docker rmi -f ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG} ${image}:latest
+          docker tag ${image}:${TAG} ${image}:latest
+          docker push ${image}:latest
+          docker rmi -f ${image}:${TAG} 10.239.45.10/arda/${image}:${TAG} ${image}:latest
 
   ipex-llm-serving-xpu:
     if: ${{ inputs.artifact == 'ipex-llm-serving-xpu' || inputs.artifact == 'all' }}
diff --git a/.github/workflows/nightly_build.yml b/.github/workflows/nightly_build.yml
index 3d667443024..6fc3027fc62 100644
--- a/.github/workflows/nightly_build.yml
+++ b/.github/workflows/nightly_build.yml
@@ -1,7 +1,7 @@
 name: Nightly Build
 
 on:
-  # pull_request:
+  push:
   #   branches: [ main ]
   #   paths:
   #     - '.github/workflows/nightly_build.yml'
@@ -17,14 +17,14 @@ permissions:
 
 jobs:
 
-  llm-cpp-build:
-    uses: ./.github/workflows/llm-binary-build.yml
+  # llm-cpp-build:
+  #   uses: ./.github/workflows/llm-binary-build.yml
 
   ipex-llm-build:
     # python build can only be published once a day, please do not publish it manually
-    if: ${{ github.event.schedule || github.event_name == 'workflow_dispatch' }}
-    runs-on: [self-hosted, Bree]
-    needs: llm-cpp-build
+    # if: ${{ github.event.schedule || github.event_name == 'workflow_dispatch' }}
+    runs-on: ubuntu-latest
+    # needs: llm-cpp-build
     steps:
       - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # actions/checkout@v3
 
@@ -40,13 +40,17 @@ jobs:
           pip install wheel
           pip install twine
 
-      - name: Download llm binary
-        uses: ./.github/actions/llm/download-llm-binary
+      # - name: Download llm binary
+      #   uses: ./.github/actions/llm/download-llm-binary
 
       - name: Build package
+        env:
+          TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }}
+          TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
+          SOURCEFORGE_PW: ${{ secrets.SOURCEFORGE_PW }}
         run: |
-          export TIMESTAMP=`date '+%Y%m%d'`
-          # export TIMESTAMP=20240323
+          # export TIMESTAMP=`date '+%Y%m%d'`
+          export TIMESTAMP=20240103
           export PYPI_VERSION=2.1.0
           nb_version=${PYPI_VERSION}b${TIMESTAMP}
           echo ${nb_version}
@@ -57,9 +61,9 @@ jobs:
           ## linux ##
           bash python/llm/dev/release_default_linux.sh ${nb_version} true
 
-  docker-build:
-    if: ${{ github.event.schedule }}
-    uses: ./.github/workflows/manually_build.yml
-    with:
-      artifact: all
-      tag: 2.1.0-SNAPSHOT
+  # docker-build:
+  #   if: ${{ github.event.schedule }}
+  #   uses: ./.github/workflows/manually_build.yml
+  #   with:
+  #     artifact: all
+  #     tag: 2.1.0-SNAPSHOT
diff --git a/python/llm/dev/release.sh b/python/llm/dev/release.sh
index 93ca7d57197..d273a815ebe 100644
--- a/python/llm/dev/release.sh
+++ b/python/llm/dev/release.sh
@@ -74,10 +74,10 @@ if [ ${upload} == true ]; then
     # upload to pypi
     upload_to_pypi_command="twine upload dist/ipex_llm-${ipex_llm_version}-*-${verbose_pname}.whl"
     echo "Please manually upload with this command: $upload_to_pypi_command"
-    $upload_to_pypi_command
+    # $upload_to_pypi_command
 
     # upload to sourceforge
-    rsync -avzr -e \
+    rsync -avzr --verbose -e \
         "sshpass -p '${SOURCEFORGE_PW}' ssh -o StrictHostKeyChecking=no" \
         ./dist/ipex_llm-${ipex_llm_version}-*-${verbose_pname}.whl \
         intelanalytics@frs.sourceforge.net:/home/frs/project/analytics-zoo/ipex-llm-whl/ipex-llm/${ipex_llm_version}/