diff --git a/ppml/services/bigdl-attestation-service/docker/Dockerfile b/ppml/services/bigdl-attestation-service/docker/Dockerfile
index a6a5773bad5..82a2dd77dac 100644
--- a/ppml/services/bigdl-attestation-service/docker/Dockerfile
+++ b/ppml/services/bigdl-attestation-service/docker/Dockerfile
@@ -1,5 +1,5 @@
 ARG BASE_IMAGE_NAME=intelanalytics/bigdl-ppml-trusted-bigdata-gramine-base
-ARG BASE_IMAGE_TAG=2.3.0-SNAPSHOT
+ARG BASE_IMAGE_TAG=2.3.0

 FROM $BASE_IMAGE_NAME:$BASE_IMAGE_TAG as attestation-service-base
diff --git a/ppml/services/bigdl-attestation-service/docker/README.md b/ppml/services/bigdl-attestation-service/docker/README.md
index 50cc711d352..03a873f9340 100644
--- a/ppml/services/bigdl-attestation-service/docker/README.md
+++ b/ppml/services/bigdl-attestation-service/docker/README.md
@@ -1,7 +1,7 @@
 ## 1. Build image
 You can pull the BigDL Remote Attestation Service image from dockerhub.
 ``` bash
-docker pull intelanalytics/bigdl-attestation-service:2.3.0-SNAPSHOT
+docker pull intelanalytics/bigdl-attestation-service:2.3.0
 ```
 Or you can clone BigDL repository and build the image with `build-docker-image.sh`. First you need to generate your enclave key using the command below, and keep it safely for future remote attestations and to start SGX enclaves more securely.
@@ -20,7 +20,7 @@
 export DATA_PATH=
 export KEYS_PATH=
 export NFS_INPUT_PATH=
 export LOCAL_IP=
-export DOCKER_IMAGE=intelanalytics/bigdl-attestation-service:2.3.0-SNAPSHOT
+export DOCKER_IMAGE=intelanalytics/bigdl-attestation-service:2.3.0
 export PCCS_URL=
 export HTTPS_KEY_STORE_TOKEN=
diff --git a/ppml/services/bigdl-attestation-service/docker/build-docker-image.sh b/ppml/services/bigdl-attestation-service/docker/build-docker-image.sh
index 2e68ecf32cc..1d6bd9ef81b 100644
--- a/ppml/services/bigdl-attestation-service/docker/build-docker-image.sh
+++ b/ppml/services/bigdl-attestation-service/docker/build-docker-image.sh
@@ -1,8 +1,8 @@
 # set -x
 export BASE_IMAGE_NAME=intelanalytics/bigdl-ppml-trusted-bigdata-gramine-base
-export BASE_IMAGE_TAG=2.3.0-SNAPSHOT
+export BASE_IMAGE_TAG=2.3.0
 export IMAGE_NAME=intelanalytics/bigdl-attestation-service
-export IMAGE_VERSION=2.3.0-SNAPSHOT
+export IMAGE_VERSION=2.3.0

 sudo docker build \
     --no-cache=true \
diff --git a/ppml/services/bigdl-attestation-service/kubernetes/bigdl-attestation-service-tdvm.yaml b/ppml/services/bigdl-attestation-service/kubernetes/bigdl-attestation-service-tdvm.yaml
index b26fb6ddeea..7644b58a389 100644
--- a/ppml/services/bigdl-attestation-service/kubernetes/bigdl-attestation-service-tdvm.yaml
+++ b/ppml/services/bigdl-attestation-service/kubernetes/bigdl-attestation-service-tdvm.yaml
@@ -48,7 +48,7 @@ spec:
           path: your_service_keys_path
       containers:
       - name: remote-attestation-service
-        image: intelanalytics/bigdl-attestation-service:2.3.0-SNAPSHOT
+        image: intelanalytics/bigdl-attestation-service:2.3.0
         imagePullPolicy: Always
         command: ["/ppml/bigdl-as-entrypoint.sh"]
         env:
diff --git a/ppml/services/bigdl-attestation-service/kubernetes/bigdl-attestation-service.yaml b/ppml/services/bigdl-attestation-service/kubernetes/bigdl-attestation-service.yaml
index 7804277a3d5..1014f8d6864 100644
--- a/ppml/services/bigdl-attestation-service/kubernetes/bigdl-attestation-service.yaml
+++ b/ppml/services/bigdl-attestation-service/kubernetes/bigdl-attestation-service.yaml
@@ -51,7 +51,7 @@ spec:
           path: your_service_keys_path
      containers:
      - name: remote-attestation-service
-        image: intelanalytics/bigdl-attestation-service:2.3.0-SNAPSHOT
+        image: intelanalytics/bigdl-attestation-service:2.3.0
        imagePullPolicy: IfNotPresent
        command: ["/ppml/bigdl-as-entrypoint.sh"]
        env:
diff --git a/ppml/services/kms-utils/docker/Dockerfile b/ppml/services/kms-utils/docker/Dockerfile
index 62f664a48ec..cfa124147d7 100644
--- a/ppml/services/kms-utils/docker/Dockerfile
+++ b/ppml/services/kms-utils/docker/Dockerfile
@@ -1,6 +1,6 @@
 ARG JDK_VERSION=8u192
 ARG SPARK_VERSION=3.1.3
-ARG BIGDL_VERSION=2.3.0-SNAPSHOT
+ARG BIGDL_VERSION=2.3.0

 # stage.1 java & spark
 FROM ubuntu:20.04 AS builder
diff --git a/ppml/tdx/docker/trusted-deep-learning/README.md b/ppml/tdx/docker/trusted-deep-learning/README.md
index dcb0a747fb7..3a67fdc1cf4 100644
--- a/ppml/tdx/docker/trusted-deep-learning/README.md
+++ b/ppml/tdx/docker/trusted-deep-learning/README.md
@@ -4,5 +4,5 @@ docker build \
     --build-arg http_proxy=.. \
     --build-arg https_proxy=.. \
     --build-arg no_proxy=.. \
-    --rm --no-cache -t intelanalytics/bigdl-ppml-trusted-deep-learning:2.3.0-SNAPSHOT .
+    --rm --no-cache -t intelanalytics/bigdl-ppml-trusted-deep-learning:2.3.0 .
 ```
diff --git a/ppml/tdx/tdx-cc/trusted-bigd-data/client-image/README.md b/ppml/tdx/tdx-cc/trusted-bigd-data/client-image/README.md
index b5a5fb6b3de..3dadc8885fe 100644
--- a/ppml/tdx/tdx-cc/trusted-bigd-data/client-image/README.md
+++ b/ppml/tdx/tdx-cc/trusted-bigd-data/client-image/README.md
@@ -4,5 +4,5 @@ docker build \
     --build-arg http_proxy=.. \
     --build-arg https_proxy=.. \
     --build-arg no_proxy=.. \
-    --rm --no-cache -t intelanalytics/bigdl-tdx-cc-client-spark-3.1.3:2.3.0-SNAPSHOT .
+    --rm --no-cache -t intelanalytics/bigdl-tdx-cc-client-spark-3.1.3:2.3.0 .
 ```
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/Dockerfile b/ppml/trusted-big-data-ml/scala/docker-occlum/Dockerfile
index ab1e056ec53..b06df7d0781 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/Dockerfile
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/Dockerfile
@@ -173,7 +173,7 @@ RUN cd /opt && bash ./install_python_with_conda.sh

 FROM occlum/occlum:0.29.5-ubuntu20.04 as ppml
 MAINTAINER The BigDL Authors https://github.com/intel-analytics/BigDL
-ARG BIGDL_VERSION=2.3.0-SNAPSHOT
+ARG BIGDL_VERSION=2.3.0
 ARG SPARK_VERSION
 ARG HADOOP_VERSION
 ENV HADOOP_VERSION=${HADOOP_VERSION}
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/README.md b/ppml/trusted-big-data-ml/scala/docker-occlum/README.md
index 349cd4028bd..6840281333f 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/README.md
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/README.md
@@ -27,7 +27,7 @@ After our many tests, Using MALLOC_ARENA_MAX=1 is fine in most cases. Hadoop rec
 Pull image from dockerhub.

 ```bash
-docker pull intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0-SNAPSHOT
+docker pull intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0
 ```

 ## Before running the example
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/README.md b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/README.md
index abef4fb96b7..eb0b64b1e77 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/README.md
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/README.md
@@ -16,12 +16,12 @@ After our many tests, Using MALLOC_ARENA_MAX=1 is fine in most cases. Hadoop rec
 ## Prerequisite

 * Check Kubernetes env or Install Kubernetes from [wiki](https://kubernetes.io/zh/docs/setup/production-environment)
-* Prepare image `intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0-SNAPSHOT`
+* Prepare image `intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0`

 1. Pull image from Dockerhub

 ```bash
-docker pull intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0-SNAPSHOT
+docker pull intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0
 ```

 If Dockerhub is not accessible, we can build a docker image with Dockerfile and modify the path in the build-docker-image.sh firstly.
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_pyspark_pi.sh b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_pyspark_pi.sh
index ef3ac6f8f14..03243294139 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_pyspark_pi.sh
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_pyspark_pi.sh
@@ -6,7 +6,7 @@ ${SPARK_HOME}/bin/spark-submit \
     --name pyspark-pi \
     --conf spark.executor.instances=1 \
     --conf spark.rpc.netty.dispatcher.numThreads=32 \
-    --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0-SNAPSHOT \
+    --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0 \
     --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark \
     --conf spark.kubernetes.executor.deleteOnTermination=false \
     --conf spark.kubernetes.driver.podTemplateFile=./driver.yaml \
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_pyspark_sklearn_example.sh b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_pyspark_sklearn_example.sh
index 58b20042b3e..207cb3abd00 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_pyspark_sklearn_example.sh
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_pyspark_sklearn_example.sh
@@ -6,7 +6,7 @@ ${SPARK_HOME}/bin/spark-submit \
     --name pyspark-sklearn \
     --conf spark.executor.instances=1 \
     --conf spark.rpc.netty.dispatcher.numThreads=32 \
-    --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0-SNAPSHOT \
+    --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0 \
     --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark \
     --conf spark.kubernetes.executor.deleteOnTermination=false \
     --conf spark.kubernetes.driver.podTemplateFile=./driver.yaml \
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_pyspark_sql_example.sh b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_pyspark_sql_example.sh
index 736e70f00d6..ab302a8412a 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_pyspark_sql_example.sh
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_pyspark_sql_example.sh
@@ -6,7 +6,7 @@ ${SPARK_HOME}/bin/spark-submit \
     --name pyspark-sql \
     --conf spark.executor.instances=1 \
     --conf spark.rpc.netty.dispatcher.numThreads=32 \
-    --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0-SNAPSHOT \
+    --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0 \
     --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark \
     --conf spark.kubernetes.executor.deleteOnTermination=false \
     --conf spark.kubernetes.driver.podTemplateFile=./driver.yaml \
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_pyspark_tpch.sh b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_pyspark_tpch.sh
index 69a04e073f4..d21dd77dbdd 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_pyspark_tpch.sh
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_pyspark_tpch.sh
@@ -6,7 +6,7 @@ ${SPARK_HOME}/bin/spark-submit \
     --name pyspark-tpch \
     --conf spark.executor.instances=2 \
     --conf spark.rpc.netty.dispatcher.numThreads=32 \
-    --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0-SNAPSHOT \
+    --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0 \
     --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark \
     --conf spark.kubernetes.executor.deleteOnTermination=false \
     --conf spark.kubernetes.driver.podTemplateFile=./driver.yaml \
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_gbt.sh b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_gbt.sh
index a49ea81a1da..9219ce1d674 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_gbt.sh
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_gbt.sh
@@ -7,7 +7,7 @@ ${SPARK_HOME}/bin/spark-submit \
     --class org.apache.spark.examples.ml.GBTExample \
     --conf spark.executor.instances=1 \
     --conf spark.rpc.netty.dispatcher.numThreads=32 \
-    --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0-SNAPSHOT \
+    --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0 \
     --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark \
     --conf spark.kubernetes.executor.podNamePrefix="sparkgbt" \
     --conf spark.kubernetes.executor.deleteOnTermination=false \
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_gbt_criteo.sh b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_gbt_criteo.sh
index 3f6d448c0c7..d605263dc9f 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_gbt_criteo.sh
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_gbt_criteo.sh
@@ -6,7 +6,7 @@ ${SPARK_HOME}/bin/spark-submit \
     --name spark-gbt \
     --class com.intel.analytics.bigdl.dllib.example.nnframes.gbt.gbtClassifierTrainingExampleOnCriteoClickLogsDataset \
     --conf spark.rpc.netty.dispatcher.numThreads=32 \
-    --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0-SNAPSHOT \
+    --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0 \
     --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark \
     --conf spark.kubernetes.executor.deleteOnTermination=false \
     --conf spark.kubernetes.driver.podTemplateFile=./driver.yaml \
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_lgbm.sh b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_lgbm.sh
index b5d4e44f2d9..fc1bfec7fad 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_lgbm.sh
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_lgbm.sh
@@ -6,7 +6,7 @@ ${SPARK_HOME}/bin/spark-submit \
     --name spark-lgbm \
     --class com.intel.analytics.bigdl.dllib.example.nnframes.lightGBM.LgbmClassifierTrain \
     --conf spark.rpc.netty.dispatcher.numThreads=32 \
-    --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0-SNAPSHOT \
+    --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0 \
     --conf spark.kubernetes.container.image.pullPolicy="IfNotPresent" \
     --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark \
     --conf spark.kubernetes.executor.deleteOnTermination=false \
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_lr.sh b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_lr.sh
index 934d174d170..2944ceab2b9 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_lr.sh
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_lr.sh
@@ -7,7 +7,7 @@ ${SPARK_HOME}/bin/spark-submit \
     --class org.apache.spark.examples.ml.LogisticRegressionExample \
     --conf spark.executor.instances=1 \
     --conf spark.rpc.netty.dispatcher.numThreads=32 \
-    --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0-SNAPSHOT \
+    --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0 \
     --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark \
     --conf spark.kubernetes.executor.podNamePrefix="sparklr" \
     --conf spark.kubernetes.executor.deleteOnTermination=false \
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_pi.sh b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_pi.sh
index 969ef5a0817..b12fe622976 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_pi.sh
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_pi.sh
@@ -7,7 +7,7 @@ ${SPARK_HOME}/bin/spark-submit \
     --class org.apache.spark.examples.SparkPi \
     --conf spark.executor.instances=1 \
     --conf spark.rpc.netty.dispatcher.numThreads=32 \
-    --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0-SNAPSHOT \
+    --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0 \
     --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark \
     --conf spark.kubernetes.executor.deleteOnTermination=false \
     --conf spark.kubernetes.driver.podTemplateFile=./driver.yaml \
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_sql.sh b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_sql.sh
index cab0c1552b2..f838a9d6917 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_sql.sh
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_sql.sh
@@ -7,7 +7,7 @@ ${SPARK_HOME}/bin/spark-submit \
     --class org.apache.spark.examples.sql.SparkSQLExample \
     --conf spark.executor.instances=1 \
     --conf spark.rpc.netty.dispatcher.numThreads=32 \
-    --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0-SNAPSHOT \
+    --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0 \
     --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark \
     --conf spark.kubernetes.executor.podNamePrefix="sparksql" \
     --conf spark.kubernetes.executor.deleteOnTermination=false \
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_tpch.sh b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_tpch.sh
index 13e4c5ca6c9..3fa0b5da7fc 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_tpch.sh
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_tpch.sh
@@ -6,7 +6,7 @@ ${SPARK_HOME}/bin/spark-submit \
     --name spark-tpch \
     --class com.intel.analytics.bigdl.ppml.examples.tpch.TpchQuery \
     --conf spark.rpc.netty.dispatcher.numThreads=32 \
-    --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0-SNAPSHOT \
+    --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0 \
     --conf spark.kubernetes.container.image.pullPolicy="IfNotPresent" \
     --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark \
     --conf spark.kubernetes.executor.deleteOnTermination=false \
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_xgboost.sh b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_xgboost.sh
index b5ffda2cc87..4c337aaa6ec 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_xgboost.sh
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/kubernetes/run_spark_xgboost.sh
@@ -7,7 +7,7 @@ ${SPARK_HOME}/bin/spark-submit \
     --class com.intel.analytics.bigdl.dllib.example.nnframes.xgboost.xgbClassifierTrainingExample \
     --conf spark.executor.instances=1 \
     --conf spark.rpc.netty.dispatcher.numThreads=32 \
-    --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0-SNAPSHOT \
+    --conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0 \
     --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark \
     --conf spark.kubernetes.executor.deleteOnTermination=false \
     --conf spark.kubernetes.driver.podTemplateFile=./driver.yaml \
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/production/Dockerfile b/ppml/trusted-big-data-ml/scala/docker-occlum/production/Dockerfile
index 8ef26b45045..f0d908aea5c 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/production/Dockerfile
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/production/Dockerfile
@@ -166,7 +166,7 @@ RUN cd /opt && bash ./install_python_with_conda.sh

 FROM occlum/occlum:0.29.5-ubuntu20.04 as ppml
 MAINTAINER The BigDL Authors https://github.com/intel-analytics/BigDL
-ARG BIGDL_VERSION=2.3.0-SNAPSHOT
+ARG BIGDL_VERSION=2.3.0
 ARG SPARK_VERSION
 ARG HADOOP_VERSION
 ENV HADOOP_VERSION=${HADOOP_VERSION}
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/production/README.md b/ppml/trusted-big-data-ml/scala/docker-occlum/production/README.md
index 945bd207f7d..3ce6e252933 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/production/README.md
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/production/README.md
@@ -14,7 +14,7 @@ final_name=`intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum-productio

 ### Pull production image from dockerhub and add self libs or source code.
 ```bash
-docker pull intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum-production:2.3.0-SNAPSHOT
+docker pull intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum-production:2.3.0
 ```
 1. enter image
 ```bash
@@ -33,7 +33,7 @@ sudo docker run -it \
     -e SGX_KERNEL_HEAP=1GB \
     -e ENABLE_SGX_DEBUG=true \
     -e ATTESTATION=true \
-    intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum-production:2.3.0-SNAPSHOT \
+    intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum-production:2.3.0 \
     bash
 ```
 2. Add python code into /opt/py-examples, add python libs in to /opt/python-occlum, add jars into $BIGDL_HOME/jars.
@@ -49,7 +49,7 @@ docker commit $container_name $container_name-build

 ### Pull production-build image from dockerhub.
 ```bash
-docker pull intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum-production:2.3.0-SNAPSHOT-build
+docker pull intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum-production:2.3.0-build
 ```

 ## Using BigDL PPML Occlum EHSM Attestation
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/production/customer/Dockerfile b/ppml/trusted-big-data-ml/scala/docker-occlum/production/customer/Dockerfile
index 71f85e1936a..95e1ad5a9f3 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/production/customer/Dockerfile
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/production/customer/Dockerfile
@@ -7,7 +7,7 @@ RUN rm -rf /opt/occlum_spark/image

 FROM ubuntu:20.04
 MAINTAINER The BigDL Authors https://github.com/intel-analytics/BigDL
-ARG BIGDL_VERSION=2.3.0-SNAPSHOT
+ARG BIGDL_VERSION=2.3.0
 ARG SPARK_VERSION=3.1.3
 ARG HADOOP_VERSION=3.2.0
 ARG SPARK_SCALA_VERSION=2.12
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/production/customer/README.md b/ppml/trusted-big-data-ml/scala/docker-occlum/production/customer/README.md
index bd153f84b85..6b49244e0d1 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/production/customer/README.md
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/production/customer/README.md
@@ -11,7 +11,7 @@ the final image is called `intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-o

 ### Pull customer image from dockerhub
 ```bash
-docker pull intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum-production-customer:2.3.0-SNAPSHOT-build
+docker pull intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum-production-customer:2.3.0-build
 ```
 ### Build customer image from production-build image
 ```bash
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/production/occlum-build.sh b/ppml/trusted-big-data-ml/scala/docker-occlum/production/occlum-build.sh
index e60560667cb..bdb8543bde2 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/production/occlum-build.sh
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/production/occlum-build.sh
@@ -1,7 +1,7 @@
 # default
-export container_name=2.3.0-SNAPSHOT-build-container
-export image_name=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum-production:2.3.0-SNAPSHOT
-export final_name=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum-production:2.3.0-SNAPSHOT-build
+export container_name=2.3.0-build-container
+export image_name=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum-production:2.3.0
+export final_name=intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum-production:2.3.0-build
 while getopts ":c:i:f:" opt
 do
     case $opt in
diff --git a/ppml/trusted-big-data-ml/scala/docker-occlum/start-spark-local.sh b/ppml/trusted-big-data-ml/scala/docker-occlum/start-spark-local.sh
index b91c976a0a0..4f50533b495 100644
--- a/ppml/trusted-big-data-ml/scala/docker-occlum/start-spark-local.sh
+++ b/ppml/trusted-big-data-ml/scala/docker-occlum/start-spark-local.sh
@@ -22,5 +22,5 @@ sudo docker run -it \
     -e CHALLENGE=cHBtbAo= \
     -e REPORT_DATA=ppml \
     -e SGX_LOG_LEVEL=off \
-    intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0-SNAPSHOT \
+    intelanalytics/bigdl-ppml-trusted-big-data-ml-scala-occlum:2.3.0 \
     bash /opt/run_spark_on_occlum_glibc.sh $1
diff --git a/ppml/trusted-bigdata/azure/submit-spark-sgx-az.sh b/ppml/trusted-bigdata/azure/submit-spark-sgx-az.sh
index f200a849f96..196b9c91a84 100644
--- a/ppml/trusted-bigdata/azure/submit-spark-sgx-az.sh
+++ b/ppml/trusted-bigdata/azure/submit-spark-sgx-az.sh
@@ -3,7 +3,7 @@ export RUNTIME_DRIVER_MEMORY=8g

 RUNTIME_SPARK_MASTER=
 AZ_CONTAINER_REGISTRY=
-BIGDL_VERSION=2.3.0-SNAPSHOT
+BIGDL_VERSION=2.3.0
 SGX_MEM=16g
 SPARK_EXTRA_JAR_PATH=
 SPARK_JOB_MAIN_CLASS=
diff --git a/ppml/trusted-deep-learning/README.md b/ppml/trusted-deep-learning/README.md
index 2108e2a5908..0d4a4c6291a 100644
--- a/ppml/trusted-deep-learning/README.md
+++ b/ppml/trusted-deep-learning/README.md
@@ -8,15 +8,15 @@ The following sections will show how to run a small demo using our currently-dev
 ## Before Running code

 ### 1. Build Docker Images

-**Tip:** if you want to skip building the custom image, you can use our public image `intelanalytics/bigdl-ppml-trusted-deep-learning-gramine-ref:2.3.0-SNAPSHOT` for a quick start, which is provided for a demo purpose. Do not use it for production.
+**Tip:** if you want to skip building the custom image, you can use our public image `intelanalytics/bigdl-ppml-trusted-deep-learning-gramine-ref:2.3.0` for a quick start, which is provided for a demo purpose. Do not use it for production.

 #### 1.1 Build BigDL Base Image

 The bigdl base image is a public one that does not contain any secrets. You will use the base image to get your own custom image in the following steps.

-Please be noted that the `intelanalytics/bigdl-ppml-trusted-deep-learning-gramine-base:2.3.0-SNAPSHOT` image relies on the `intelanalytics/bigdl-ppml-gramine-base:2.3.0-SNAPSHOT` image.
+Please be noted that the `intelanalytics/bigdl-ppml-trusted-deep-learning-gramine-base:2.3.0` image relies on the `intelanalytics/bigdl-ppml-gramine-base:2.3.0` image.

-For the instructions on how to build the `gramine-base` image, check `ppml/base/README.md` in our repository. Another option is to use our public image `intelanalytics/bigdl-ppml-gramine-base:2.3.0-SNAPSHOT` for a quick start.
+For the instructions on how to build the `gramine-base` image, check `ppml/base/README.md` in our repository. Another option is to use our public image `intelanalytics/bigdl-ppml-gramine-base:2.3.0` for a quick start.

 Before running the following command, please modify the paths in `../base/build-docker-image.sh`. Then build the docker image with the following command.
@@ -36,7 +36,7 @@ It will generate a file `enclave-key.pem` in `ppml/trusted-deep-learning/ref` di
 openssl genrsa -3 -out enclave-key.pem 3072
 ```

-Then, use the `enclave-key.pem` and the `intelanalytics/bigdl-ppml-trusted-deep-learning-gramine-base:2.3.0-SNAPSHOT` image to build your own custom image. In the process, SGX MREnclave will be made and signed without saving the sensitive enclave key inside the final image, which is safer.
+Then, use the `enclave-key.pem` and the `intelanalytics/bigdl-ppml-trusted-deep-learning-gramine-base:2.3.0` image to build your own custom image. In the process, SGX MREnclave will be made and signed without saving the sensitive enclave key inside the final image, which is safer.

 Before running the following command, please modify the paths in `./build-custom-image.sh`. Then build the docker image with the following command.
@@ -110,7 +110,7 @@ Now you should have all files required for TLS encryption, including `myCA.pem`,

 *WARNING: We are currently actively developing our images, which indicate that the ENTRYPOINT of the docker image may be changed accordingly. We will do our best to update our documentation in time.*

-We have included a file named `mnist.py` in our `intelanalytics/bigdl-ppml-trusted-deep-learning-gramine-ref:2.3.0-SNAPSHOT` image for test purpose. In the following sections, we will show how to run distributed PyTorch training in nodes with SGX enabled.
+We have included a file named `mnist.py` in our `intelanalytics/bigdl-ppml-trusted-deep-learning-gramine-ref:2.3.0` image for test purpose. In the following sections, we will show how to run distributed PyTorch training in nodes with SGX enabled.

 Run the following script on nodes with SGX enabled:

@@ -125,7 +125,7 @@ To run the following bash scripts, set the following parameters:
 #### On node one

 ```bash
-export DOCKER_IMAGE=intelanalytics/bigdl-ppml-trusted-deep-learning-gramine-ref:2.3.0-SNAPSHOT
+export DOCKER_IMAGE=intelanalytics/bigdl-ppml-trusted-deep-learning-gramine-ref:2.3.0
 export CERTS_PATH="your_certs_path"
 sudo docker run -itd \
     --net=host \
@@ -157,7 +157,7 @@ docker logs -f node_one
 #### On node two

 ```bash
-export DOCKER_IMAGE=intelanalytics/bigdl-ppml-trusted-deep-learning-gramine-ref:2.3.0-SNAPSHOT
+export DOCKER_IMAGE=intelanalytics/bigdl-ppml-trusted-deep-learning-gramine-ref:2.3.0
 export CERTS_PATH="your_certs_path"
 sudo docker run -itd \
     --net=host \
@@ -191,7 +191,7 @@ docker logs -f node_two
 #### On node three

 ```bash
-export DOCKER_IMAGE=intelanalytics/bigdl-ppml-trusted-deep-learning-gramine-ref:2.3.0-SNAPSHOT
+export DOCKER_IMAGE=intelanalytics/bigdl-ppml-trusted-deep-learning-gramine-ref:2.3.0
 export CERTS_PATH="your_certs_path"
 sudo docker run -itd \
     --net=host \
diff --git a/ppml/trusted-deep-learning/base/Dockerfile b/ppml/trusted-deep-learning/base/Dockerfile
index 1c5b8da7026..b2cfea7529e 100644
--- a/ppml/trusted-deep-learning/base/Dockerfile
+++ b/ppml/trusted-deep-learning/base/Dockerfile
@@ -1,4 +1,4 @@
-ARG BIGDL_VERSION=2.3.0-SNAPSHOT
+ARG BIGDL_VERSION=2.3.0
 ARG TINI_VERSION=v0.18.0
 ARG BASE_IMAGE_NAME
 ARG BASE_IMAGE_TAG
diff --git a/ppml/trusted-deep-learning/base/build-docker-image.sh b/ppml/trusted-deep-learning/base/build-docker-image.sh
index d709d7ca21d..7a7fb9759bb 100644
--- a/ppml/trusted-deep-learning/base/build-docker-image.sh
+++ b/ppml/trusted-deep-learning/base/build-docker-image.sh
@@ -9,11 +9,11 @@ Proxy_Modified="sudo docker build \
     --build-arg http_proxy=http://${HTTP_PROXY_HOST}:${HTTP_PROXY_PORT} \
     --build-arg https_proxy=http://${HTTPS_PROXY_HOST}:${HTTPS_PROXY_PORT} \
     --build-arg no_proxy=x.x.x.x \
-    -t intelanalytics/bigdl-ppml-trusted-deep-learning-gramine-base:2.3.0-SNAPSHOT -f ./Dockerfile ."
+    -t intelanalytics/bigdl-ppml-trusted-deep-learning-gramine-base:2.3.0 -f ./Dockerfile ."

 No_Proxy_Modified="sudo docker build \
     --build-arg no_proxy=x.x.x.x \
-    -t intelanalytics/bigdl-ppml-trusted-deep-learning-gramine-base:2.3.0-SNAPSHOT -f ./Dockerfile ."
+    -t intelanalytics/bigdl-ppml-trusted-deep-learning-gramine-base:2.3.0 -f ./Dockerfile ."

 if [[ "$HTTP_PROXY_HOST" == "your_http_proxy_host" ]] || [[ "$HTTP_PROXY_PORT" == "your_http_proxy_port" ]] || [[ "$HTTPS_PROXY_HOST" == "your_https_proxy_host" ]] || [[ "$HTTPS_PROXY_PORT" == "your_https_proxy_port" ]]
 then
diff --git a/ppml/trusted-deep-learning/ref/build-custom-image.sh b/ppml/trusted-deep-learning/ref/build-custom-image.sh
index 4971f52ae2a..2acaa172846 100644
--- a/ppml/trusted-deep-learning/ref/build-custom-image.sh
+++ b/ppml/trusted-deep-learning/ref/build-custom-image.sh
@@ -9,11 +9,11 @@ Proxy_Modified="sudo docker build \
     --build-arg http_proxy=http://${HTTP_PROXY_HOST}:${HTTP_PROXY_PORT} \
     --build-arg https_proxy=http://${HTTPS_PROXY_HOST}:${HTTPS_PROXY_PORT} \
     --build-arg no_proxy=x.x.x.x \
-    -t intelanalytics/bigdl-ppml-trusted-deep-learning-gramine-ref:2.3.0-SNAPSHOT -f ./Dockerfile ."
+    -t intelanalytics/bigdl-ppml-trusted-deep-learning-gramine-ref:2.3.0 -f ./Dockerfile ."

 No_Proxy_Modified="sudo docker build \
     --build-arg no_proxy=x.x.x.x \
-    -t intelanalytics/bigdl-ppml-trusted-deep-learning-gramine-ref:2.3.0-SNAPSHOT -f ./Dockerfile ."
+    -t intelanalytics/bigdl-ppml-trusted-deep-learning-gramine-ref:2.3.0 -f ./Dockerfile ."

 if [[ "$HTTP_PROXY_HOST" == "your_http_proxy_host" ]] || [[ "$HTTP_PROXY_PORT" == "your_http_proxy_port" ]] || [[ "$HTTPS_PROXY_HOST" == "your_https_proxy_host" ]] || [[ "$HTTPS_PROXY_PORT" == "your_https_proxy_port" ]]
 then
diff --git a/ppml/trusted-dl-serving/README.md b/ppml/trusted-dl-serving/README.md
index 8ea173967bf..14c8252bc09 100644
--- a/ppml/trusted-dl-serving/README.md
+++ b/ppml/trusted-dl-serving/README.md
@@ -10,15 +10,15 @@ Besides, some demos and performance benchmark results will also be included in t
 ## Before Running code

 ### 1. Build Docker Images

-**Tip:** if you want to skip building the custom image, you can use our public image `intelanalytics/bigdl-ppml-trusted-dl-serving-gramine-ref:2.3.0-SNAPSHOT` for a quick start, which is provided for a demo purpose. Do not use it for production.
+**Tip:** if you want to skip building the custom image, you can use our public image `intelanalytics/bigdl-ppml-trusted-dl-serving-gramine-ref:2.3.0` for a quick start, which is provided for a demo purpose. Do not use it for production.

 ### 1.1 Build BigDL Base Image

 The bigdl base image is a public one that does not contain any secrets. You will use the base image to get your own custom image in the following steps.

-Please be noted that the `intelanalytics/bigdl-ppml-trusted-dl-serving-gramine-base:2.3.0-SNAPSHOT` image relies on the `intelanalytics/bigdl-ppml-gramine-base:2.3.0-SNAPSHOT` image.
+Please be noted that the `intelanalytics/bigdl-ppml-trusted-dl-serving-gramine-base:2.3.0` image relies on the `intelanalytics/bigdl-ppml-gramine-base:2.3.0` image.

-For the instructions on how to build the `gramine-base` image, check `ppml/base/README.md` in our repository. Another option is to use our public image `intelanalytics/bigdl-ppml-gramine-base:2.3.0-SNAPSHOT` for a quick start.
+For the instructions on how to build the `gramine-base` image, check `ppml/base/README.md` in our repository. Another option is to use our public image `intelanalytics/bigdl-ppml-gramine-base:2.3.0` for a quick start.

 Before running the following command, please modify the paths in `../base/build-docker-image.sh`. Then build the docker image with the following command.
@@ -38,7 +38,7 @@ It will generate a file `enclave-key.pem` in `ppml/trusted-deep-learning/ref` di
 openssl genrsa -3 -out enclave-key.pem 3072
 ```

-Then, use the `enclave-key.pem` and the `intelanalytics/bigdl-ppml-trusted-deep-learning-gramine-base:2.3.0-SNAPSHOT` image to build your own custom image. In the process, SGX MREnclave will be made and signed without saving the sensitive enclave key inside the final image, which is safer.
+Then, use the `enclave-key.pem` and the `intelanalytics/bigdl-ppml-trusted-deep-learning-gramine-base:2.3.0` image to build your own custom image. In the process, SGX MREnclave will be made and signed without saving the sensitive enclave key inside the final image, which is safer.

 Before running the following command, please modify the paths in `./build-custom-image.sh`. Then build the docker image with the following command.
diff --git a/ppml/trusted-dl-serving/base/Dockerfile b/ppml/trusted-dl-serving/base/Dockerfile
index d3d868350a5..e27855763b9 100644
--- a/ppml/trusted-dl-serving/base/Dockerfile
+++ b/ppml/trusted-dl-serving/base/Dockerfile
@@ -1,4 +1,4 @@
-ARG BIGDL_VERSION=2.3.0-SNAPSHOT
+ARG BIGDL_VERSION=2.3.0
 ARG TINI_VERSION=v0.18.0
 ARG BASE_IMAGE_NAME
 ARG BASE_IMAGE_TAG
diff --git a/ppml/trusted-dl-serving/base/build-docker-image.sh b/ppml/trusted-dl-serving/base/build-docker-image.sh
index 87d15a5c443..c8697078c99 100644
--- a/ppml/trusted-dl-serving/base/build-docker-image.sh
+++ b/ppml/trusted-dl-serving/base/build-docker-image.sh
@@ -11,11 +11,11 @@ Proxy_Modified="sudo docker build \
     --build-arg BASE_IMAGE_NAME=$BASE_IMAGE_NAME \
     --build-arg BASE_IMAGE_TAG=$BASE_IMAGE_TAG \
     --build-arg no_proxy=x.x.x.x \
-    -t intelanalytics/bigdl-ppml-trusted-dl-serving-gramine-base:2.3.0-SNAPSHOT -f ./Dockerfile ."
+    -t intelanalytics/bigdl-ppml-trusted-dl-serving-gramine-base:2.3.0 -f ./Dockerfile ."

 No_Proxy_Modified="sudo docker build \
     --build-arg no_proxy=x.x.x.x \
-    -t intelanalytics/bigdl-ppml-trusted-dl-serving-gramine-base:2.3.0-SNAPSHOT -f ./Dockerfile ."
+    -t intelanalytics/bigdl-ppml-trusted-dl-serving-gramine-base:2.3.0 -f ./Dockerfile ."

 if [[ "$HTTP_PROXY_HOST" == "your_http_proxy_host" ]] || [[ "$HTTP_PROXY_PORT" == "your_http_proxy_port" ]] || [[ "$HTTPS_PROXY_HOST" == "your_https_proxy_host" ]] || [[ "$HTTPS_PROXY_PORT" == "your_https_proxy_port" ]]
 then
diff --git a/ppml/trusted-dl-serving/ref/build-custom-image.sh b/ppml/trusted-dl-serving/ref/build-custom-image.sh
index caa5f5ed6c3..758f3ffc990 100644
--- a/ppml/trusted-dl-serving/ref/build-custom-image.sh
+++ b/ppml/trusted-dl-serving/ref/build-custom-image.sh
@@ -13,11 +13,11 @@ Proxy_Modified="sudo docker build \
     --build-arg SGX_MEM_SIZE=MEM_SIZE_OF_SGX \
     --build-arg SGX_LOG_LEVEL=LOG_LEVEL_OF_SGX \
     --build-arg no_proxy=x.x.x.x \
-    -t intelanalytics/bigdl-ppml-trusted-dl-serving-gramine-ref:2.3.0-SNAPSHOT -f ./Dockerfile ."
+    -t intelanalytics/bigdl-ppml-trusted-dl-serving-gramine-ref:2.3.0 -f ./Dockerfile ."

 No_Proxy_Modified="sudo docker build \
     --build-arg no_proxy=x.x.x.x \
-    -t intelanalytics/bigdl-ppml-trusted-dl-serving-gramine-ref:2.3.0-SNAPSHOT -f ./Dockerfile ."
+    -t intelanalytics/bigdl-ppml-trusted-dl-serving-gramine-ref:2.3.0 -f ./Dockerfile ."

 if [[ "$HTTP_PROXY_HOST" == "your_http_proxy_host" ]] || [[ "$HTTP_PROXY_PORT" == "your_http_proxy_port" ]] || [[ "$HTTPS_PROXY_HOST" == "your_https_proxy_host" ]] || [[ "$HTTPS_PROXY_PORT" == "your_https_proxy_port" ]]
 then
diff --git a/ppml/trusted-machine-learning/Dockerfile b/ppml/trusted-machine-learning/Dockerfile
index 7dcc6dd6c79..04e120dc12a 100644
--- a/ppml/trusted-machine-learning/Dockerfile
+++ b/ppml/trusted-machine-learning/Dockerfile
@@ -1,7 +1,7 @@
 ARG BASE_IMAGE_NAME=intelanalytics/bigdl-ppml-gramine-base
-ARG BASE_IMAGE_TAG=2.3.0-SNAPSHOT
+ARG BASE_IMAGE_TAG=2.3.0
 ARG BIGDATA_IMAGE_NAME=intelanalytics/bigdl-ppml-trusted-bigdata-gramine-base
-ARG BIGDATA_IMAGE_TAG=2.3.0-SNAPSHOT
+ARG BIGDATA_IMAGE_TAG=2.3.0
 ARG SPARK_VERSION=3.1.3
 ARG TINI_VERSION=v0.18.0
 ARG JDK_VERSION=8u192
diff --git a/ppml/trusted-machine-learning/README.md b/ppml/trusted-machine-learning/README.md
index e665272eda1..8732be783f7 100644
--- a/ppml/trusted-machine-learning/README.md
+++ b/ppml/trusted-machine-learning/README.md
@@ -136,7 +136,7 @@ cd lgbm/kubernetes

 Modify parameters in `install-lgbm-trainer.sh`:
 ```bash
-export imageName=intelanalytics/bigdl-ppml-trusted-machine-learning-gramine-reference:2.3.0-SNAPSHOT # You custom image name if needed
+export imageName=intelanalytics/bigdl-ppml-trusted-machine-learning-gramine-reference:2.3.0 # You custom image name if needed
 export totalTrainerCount=2 # count of trainers as well as kubernetes pods
 export trainerPort=12400 # base port number, while the real port can be adapted
 export nfsMountPath=a_host_path_mounted_by_nfs_to_upload_data_before_training # the path you used to create kubernetes nfsvolumeclaim
diff --git a/ppml/trusted-machine-learning/build-machine-learning-base-image.sh b/ppml/trusted-machine-learning/build-machine-learning-base-image.sh
index ad21a91734f..e7bb45caa76 100644
--- a/ppml/trusted-machine-learning/build-machine-learning-base-image.sh
+++ b/ppml/trusted-machine-learning/build-machine-learning-base-image.sh
@@ -8,9 +8,9 @@ export LOCAL_IP=your_local_ip
 export LGBM_NETWORK_MODE_BUILD=SSL_OR_PLAIN

 export BASE_IMAGE_NAME=intelanalytics/bigdl-ppml-gramine-base
-export BASE_IMAGE_TAG=2.3.0-SNAPSHOT
+export BASE_IMAGE_TAG=2.3.0
 export MACHINE_LEARNING_IMAGE_NAME=bigdl-ppml-trusted-machine-learning-gramine-base
-export MACHINE_LEARNING_IMAGE_TAG=2.3.0-SNAPSHOT
+export MACHINE_LEARNING_IMAGE_TAG=2.3.0

 Proxy_Modified="sudo docker build \
     --build-arg http_proxy=http://${HTTP_PROXY_HOST}:${HTTP_PROXY_PORT} \
diff --git a/ppml/trusted-machine-learning/custom-image/build-custom-image.sh b/ppml/trusted-machine-learning/custom-image/build-custom-image.sh
index 9d0f0e9d806..f29465722c6 100644
--- a/ppml/trusted-machine-learning/custom-image/build-custom-image.sh
+++ b/ppml/trusted-machine-learning/custom-image/build-custom-image.sh
@@ -1,7 +1,7 @@
 export CUSTOM_IMAGE_NAME=bigdl-ppml-trusted-machine-learning-gramine-custom
-export CUSTOM_IMAGE_TAG=2.3.0-SNAPSHOT
+export CUSTOM_IMAGE_TAG=2.3.0
 export MACHINE_LEARNING_BASE_IMAGE_NAME=bigdl-ppml-trusted-machine-learning-gramine-base
-export MACHINE_LEARNING_BASE_IMAGE_TAG=2.3.0-SNAPSHOT
+export MACHINE_LEARNING_BASE_IMAGE_TAG=2.3.0
 export SGX_MEM_SIZE=memory_size_of_sgx_in_custom_image
 export SGX_LOG_LEVEL=log_level_of_sgx_in_custom_image
 export ENABLE_DCAP_ATTESTATION=true
diff --git a/ppml/trusted-python-toolkit/Dockerfile b/ppml/trusted-python-toolkit/Dockerfile
index 03eefb2d559..c48a9596f11 100644
--- a/ppml/trusted-python-toolkit/Dockerfile
+++ b/ppml/trusted-python-toolkit/Dockerfile
@@ -2,7 +2,7 @@ ARG http_proxy
 ARG https_proxy
 ARG BASE_IMAGE_NAME
 ARG BASE_IMAGE_TAG
-ARG BIGDL_VERSION=2.3.0-SNAPSHOT
+ARG BIGDL_VERSION=2.3.0
 ARG TINI_VERSION=v0.18.0
 ARG JDK_VERSION=11
diff --git a/ppml/trusted-python-toolkit/README.md b/ppml/trusted-python-toolkit/README.md
index 5343b198485..326d2c693b7 100644
--- a/ppml/trusted-python-toolkit/README.md
+++ b/ppml/trusted-python-toolkit/README.md
@@ -5,16 +5,16 @@ This image contains Gramine and some popular python toolkits including numpy, pa

 ## 1. Build Docker Images

-**Tip:** if you want to skip building the custom image, you can use our public image `intelanalytics/bigdl-ppml-trusted-python-toolkit-ref:2.3.0-SNAPSHOT` for a quick start, which is provided for a demo purpose. Do not use it in production.
+**Tip:** if you want to skip building the custom image, you can use our public image `intelanalytics/bigdl-ppml-trusted-python-toolkit-ref:2.3.0` for a quick start, which is provided for a demo purpose. Do not use it in production.

 ### 1.1 Build Gramine Base Image

-Gramine base image provides necessary tools including gramine, python, java, etc for the image in this directory. You can build your own gramine base image following the steps in [Gramine PPML Base Image](https://github.com/intel-analytics/BigDL/tree/main/ppml/base#gramine-ppml-base-image). You can also use our public image `intelanalytics/bigdl-ppml-gramine-base:2.3.0-SNAPSHOT` for a quick start.
+Gramine base image provides necessary tools including gramine, python, java, etc for the image in this directory. You can build your own gramine base image following the steps in [Gramine PPML Base Image](https://github.com/intel-analytics/BigDL/tree/main/ppml/base#gramine-ppml-base-image). You can also use our public image `intelanalytics/bigdl-ppml-gramine-base:2.3.0` for a quick start.

 ### 1.2 Build Python Toolkit Base Image

 The python toolkit base image is a public one that does not contain any secrets. You will use the base image to get your own custom image.

-You can use our public base image `intelanalytics/bigdl-ppml-trusted-python-toolkit-base:2.3.0-SNAPSHOT`, or, You can build your own base image based on `intelanalytics/bigdl-ppml-gramine-base:2.3.0-SNAPSHOT` as follows. Remember to assign values to the variables in `build-toolkit-base-image.sh` before running the script.
+You can use our public base image `intelanalytics/bigdl-ppml-trusted-python-toolkit-base:2.3.0`, or, You can build your own base image based on `intelanalytics/bigdl-ppml-gramine-base:2.3.0` as follows. Remember to assign values to the variables in `build-toolkit-base-image.sh` before running the script.

 ```shell
 # configure parameters in build-toolkit-base-image.sh please