
Commit

Merge remote-tracking branch 'upstream/main' into ir_posix_regex
Committed-by: bingqing.lbq from Dev container
BingqingLyu committed Oct 18, 2023
2 parents a65b10b + 7cb43e9 commit 30c99b0
Showing 54 changed files with 1,081 additions and 318 deletions.
51 changes: 51 additions & 0 deletions .github/free-disk-space.sh
@@ -0,0 +1,51 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


#
# The Azure provided machines typically have the following disk allocation:
# Total space: 85GB
# Allocated: 67 GB
# Free: 17 GB
# This script frees up 28 GB of disk space by deleting unneeded packages and
# large directories.
# The Flink end to end tests download and generate more than 17 GB of files,
# causing unpredictable behavior and build failures.
#
echo "=============================================================================="
echo "Freeing up disk space on CI system"
echo "=============================================================================="

echo "Listing 100 largest packages"
dpkg-query -Wf '${Installed-Size}\t${Package}\n' | sort -n | tail -n 100
df -h
echo "Removing large packages"
sudo apt-get remove -y '^ghc-8.*'
sudo apt-get remove -y '^dotnet-.*'
sudo apt-get remove -y '^llvm-.*'
sudo apt-get remove -y 'php.*'
sudo apt-get remove -y azure-cli google-cloud-sdk hhvm google-chrome-stable firefox powershell mono-devel
sudo apt-get autoremove -y
sudo apt-get clean
df -h
echo "Removing large directories"
# deleting 15GB
rm -rf /usr/share/dotnet/
sudo rm -rf /usr/share/dotnet
sudo rm -rf /usr/local/lib/android
sudo rm -rf /opt/ghc
sudo rm -rf /opt/hostedtoolcache/CodeQL
df -h
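
The script prints df -h before and after each cleanup phase but never reports the total it reclaimed. As a hedged sketch, not part of this commit, a caller could measure the delta itself; the path .github/free-disk-space.sh is the file added above:

# Sketch (assumption): report how much space the cleanup reclaimed.
before=$(df --output=avail -BG / | tail -n 1 | tr -dc '0-9')
bash .github/free-disk-space.sh || true
after=$(df --output=avail -BG / | tail -n 1 | tr -dc '0-9')
echo "Reclaimed approximately $((after - before)) GB"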
2 changes: 1 addition & 1 deletion .github/workflows/build-graphscope-images-linux.yml
@@ -45,7 +45,7 @@ jobs:
- name: Build GraphScope Image
run: |
curl -sSL https://raw.githubusercontent.com/sighingnow/libclang/master/.github/free-disk-space.sh | bash
bash ${GITHUB_WORKSPACE}/.github/free-disk-space.sh || true
cd ${GITHUB_WORKSPACE}/k8s
df -h
2 changes: 2 additions & 0 deletions .github/workflows/build-graphscope-wheels-linux.yml
@@ -107,6 +107,8 @@ jobs:

- name: Build Wheel Package
run: |
bash ${GITHUB_WORKSPACE}/.github/free-disk-space.sh || true
# Due to an observation of changing hostname in github runners,
# append 127.0.0.1 to etc/hosts to avoid DNS lookup.
r=`cat /etc/hosts | grep $(hostname) || true`
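The hunk above cuts off right after the grep that checks whether the runner's current hostname already appears in /etc/hosts. The lines that follow are not shown here, so the completion below is only an assumed sketch of the check-then-append idiom the comment describes, not the repository's actual code:

# Sketch (assumption): map the hostname to 127.0.0.1 if it is missing,
# so later lookups do not depend on DNS.
r=$(grep "$(hostname)" /etc/hosts || true)
if [ -z "$r" ]; then
    echo "127.0.0.1 $(hostname)" | sudo tee -a /etc/hosts
fi
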
23 changes: 15 additions & 8 deletions .github/workflows/k8s-ci.yml
@@ -385,7 +385,7 @@ jobs:
run: |
cd ${GITHUB_WORKSPACE}/python
pip3 install -r requirements.txt -r requirements-dev.txt
pip3 install pytest pytest-cov pytest-timeout
pip3 install pytest pytest-cov pytest-timeout pytest-xdist
# build python client proto
cd ${GITHUB_WORKSPACE}/python
@@ -461,7 +461,10 @@ jobs:
export GS_ADDR=${NODE_IP}:${NODE_PORT}
cd ${GITHUB_WORKSPACE}/python
python3 -m pytest -s -vvv ./graphscope/tests/kubernetes/test_demo_script.py -k test_helm_installation
python3 -m pytest -d --tx popen//python=python3 \
-s -vvv \
./graphscope/tests/kubernetes/test_demo_script.py \
-k test_helm_installation
- uses: dashanji/kubernetes-log-export-action@v5
env:
@@ -488,10 +491,11 @@ jobs:
cd ${GITHUB_WORKSPACE}/python
export PATH=${HOME}/.local/bin:${PATH}
python3 -m pytest --ignore=./graphscope/tests/kubernetes/test_store_service.py \
--cov=graphscope --cov-config=.coveragerc --cov-report=xml \
--cov-report=term -s -vvv --log-cli-level=INFO \
./graphscope/tests/kubernetes
python3 -m pytest -d --tx popen//python=python3 \
--ignore=./graphscope/tests/kubernetes/test_store_service.py \
--cov=graphscope --cov-config=.coveragerc --cov-report=xml --cov-report=term \
-s -vvv --log-cli-level=INFO \
./graphscope/tests/kubernetes
- name: Upload Coverage
uses: codecov/codecov-action@v3
@@ -526,7 +530,10 @@ jobs:
# run test
cd ${GITHUB_WORKSPACE}/python
python3 -m pytest -s -vvv ./graphscope/tests/kubernetes/test_demo_script.py -k test_demo_on_hdfs
python3 -m pytest -d --tx popen//python=python3 \
-s -vvv \
./graphscope/tests/kubernetes/test_demo_script.py \
-k test_demo_on_hdfs
# Check the result file have successfully written to the given location
# hdfs dfs -test -e /ldbc_sample/res.csv_0 && hdfs dfs -test -e /ldbc_sample/res.csv_1
@@ -580,7 +587,7 @@ jobs:
run: |
cd ${GITHUB_WORKSPACE}/python
pip3 install -r requirements.txt -r requirements-dev.txt
pip3 install pytest pytest-cov pytest-timeout
pip3 install pytest pytest-cov pytest-timeout pytest-xdist
# build python client proto
python3 setup.py build_proto
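The pytest changes in this workflow switch to pytest-xdist's distributed mode: -d load-balances tests across workers, and each --tx popen//python=python3 entry adds one local worker started via popen with the named interpreter. A rough local equivalent, offered only as a hedged sketch, lets xdist size the worker pool from the CPU count instead of listing --tx entries:

# Sketch (assumption): same test selection as above, with the worker count
# chosen automatically by pytest-xdist.
python3 -m pytest -n auto -s -vvv \
    ./graphscope/tests/kubernetes/test_demo_script.py \
    -k test_helm_installation
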
73 changes: 42 additions & 31 deletions .github/workflows/local-ci.yml
@@ -281,20 +281,22 @@ jobs:
cd artifacts
tar -zxf ./wheel-${{ github.sha }}/client.tar.gz
pushd python/dist/wheelhouse
for f in * ; do python3 -m pip install $f || true; done
for f in * ; do python3 -m pip install --no-cache-dir $f || true; done
popd
# install graphscope
tar -zxf ./wheel-${{ github.sha }}/graphscope.tar.gz
pushd coordinator/dist
python3 -m pip install ./*.whl
python3 -m pip install --no-cache-dir ./*.whl
popd
pushd coordinator/dist/wheelhouse
python3 -m pip install ./*.whl
python3 -m pip install --no-cache-dir ./*.whl
popd
# install tensorflow
python3 -m pip install pytest "tensorflow" "pandas" --user
python3 -m pip install --no-cache-dir pytest pytest-xdist "tensorflow" "pandas" --user
# install pytorch
python3 -m pip install --no-cache-dir "torch" --index-url https://download.pytorch.org/whl/cpu
# install java
sudo apt update -y && sudo apt install openjdk-11-jdk -y
@@ -306,7 +308,9 @@
env:
GS_TEST_DIR: ${{ github.workspace }}/gstest
run: |
python3 -m pytest -s -v $(dirname $(python3 -c "import graphscope; print(graphscope.__file__)"))/tests/minitest
python3 -m pytest -d --tx popen//python=python3 \
-s -v \
$(dirname $(python3 -c "import graphscope; print(graphscope.__file__)"))/tests/minitest
- name: Upload GIE log
if: failure()
@@ -343,20 +347,20 @@ jobs:
cd artifacts
tar -zxf ./wheel-${{ github.sha }}/client.tar.gz
pushd python/dist/wheelhouse
for f in * ; do python3 -m pip install $f || true; done
for f in * ; do python3 -m pip install --no-cache-dir $f || true; done
popd
# install graphscope
tar -zxf ./wheel-${{ github.sha }}/graphscope.tar.gz
pushd coordinator/dist
python3 -m pip install ./*.whl
python3 -m pip install --no-cache-dir ./*.whl
popd
pushd coordinator/dist/wheelhouse
python3 -m pip install ./*.whl
python3 -m pip install --no-cache-dir ./*.whl
popd
# install pytest
python3 -m pip install pytest pytest-cov pytest-timeout
python3 -m pip install --no-cache-dir pytest pytest-cov pytest-timeout pytest-xdist
- name: Setup tmate session
uses: mxschmitt/action-tmate@v3
@@ -371,9 +375,11 @@
# download dataset
git clone -b master --single-branch --depth=1 https://github.com/7br/gstest.git ${GS_TEST_DIR}
python3 -m pytest -s -v --cov=graphscope --cov-config=python/.coveragerc \
--cov-report=xml --cov-report=term --exitfirst \
$(dirname $(python3 -c "import graphscope; print(graphscope.__file__)"))/tests/unittest
python3 -m pytest -d --tx popen//python=python3 \
-s -v \
--cov=graphscope --cov-config=python/.coveragerc --cov-report=xml --cov-report=term \
--exitfirst \
$(dirname $(python3 -c "import graphscope; print(graphscope.__file__)"))/tests/unittest
- name: Upload Coverage
if: ${{ needs.changes.outputs.gae-python == 'true' || github.ref == 'refs/heads/main' }}
@@ -429,20 +435,20 @@ jobs:
cd artifacts
tar -zxf ./wheel-${{ github.sha }}/client.tar.gz
pushd python/dist/wheelhouse
for f in * ; do python3 -m pip install $f || true; done
for f in * ; do python3 -m pip install --no-cache-dir $f || true; done
popd
# install graphscope
tar -zxf ./wheel-${{ github.sha }}/graphscope.tar.gz
pushd coordinator/dist
python3 -m pip install ./*.whl
python3 -m pip install --no-cache-dir ./*.whl
popd
pushd coordinator/dist/wheelhouse
python3 -m pip install ./*.whl
python3 -m pip install --no-cache-dir ./*.whl
popd
# install pytest
python3 -m pip install pytest
python3 -m pip install --no-cache-dir pytest pytest-xdist
# download dataset
git clone -b master --single-branch --depth=1 https://github.com/7br/gstest.git ${GS_TEST_DIR}
@@ -458,9 +464,10 @@
GS_TEST_DIR: ${{ github.workspace }}/gstest
run: |
pip3 show networkx
python3 -m pytest --exitfirst -s -vvv \
$(dirname $(python3 -c "import graphscope; print(graphscope.__file__)"))/nx/tests \
--ignore=$(dirname $(python3 -c "import graphscope; print(graphscope.__file__)"))/nx/tests/convert
python3 -m pytest -d --tx popen//python=python3 \
--exitfirst -s -vvv \
$(dirname $(python3 -c "import graphscope; print(graphscope.__file__)"))/nx/tests \
--ignore=$(dirname $(python3 -c "import graphscope; print(graphscope.__file__)"))/nx/tests/convert
- name: Convert Test
if: ${{ needs.changes.outputs.networkx == 'true' && steps.nx-filter.outputs.convert == 'true' }}
@@ -469,8 +476,9 @@
GS_TEST_DIR: ${{ github.workspace }}/gstest
run: |
pip3 show networkx
python3 -m pytest --exitfirst -s -vvv \
$(dirname $(python3 -c "import graphscope; print(graphscope.__file__)"))/nx/tests/convert
python3 -m pytest -d --tx popen//python=python3 \
--exitfirst -s -vvv \
$(dirname $(python3 -c "import graphscope; print(graphscope.__file__)"))/nx/tests/convert
networkx-algo-and-generator-test:
runs-on: ubuntu-20.04
@@ -514,20 +522,20 @@ jobs:
cd artifacts
tar -zxf ./wheel-${{ github.sha }}/client.tar.gz
pushd python/dist/wheelhouse
for f in * ; do python3 -m pip install $f || true; done
for f in * ; do python3 -m pip install --no-cache-dir $f || true; done
popd
# install graphscope
tar -zxf ./wheel-${{ github.sha }}/graphscope.tar.gz
pushd coordinator/dist
python3 -m pip install ./*.whl
python3 -m pip install --no-cache-dir ./*.whl
popd
pushd coordinator/dist/wheelhouse
python3 -m pip install ./*.whl
python3 -m pip install --no-cache-dir ./*.whl
popd
# install pytest
python3 -m pip install pytest
python3 -m pip install --no-cache-dir pytest pytest-xdist
# download dataset
git clone -b master --single-branch --depth=1 https://github.com/7br/gstest.git ${GS_TEST_DIR}
@@ -539,8 +547,9 @@
GS_TEST_DIR: ${{ github.workspace }}/gstest
run: |
pip3 show networkx
python3 -m pytest --exitfirst -s -v \
$(dirname $(python3 -c "import graphscope; print(graphscope.__file__)"))/nx/algorithms/tests/builtin
python3 -m pytest -d --tx popen//python=python3 \
--exitfirst -s -v \
$(dirname $(python3 -c "import graphscope; print(graphscope.__file__)"))/nx/algorithms/tests/builtin
- name: Generator test
if: ${{ needs.changes.outputs.networkx == 'true' && steps.nx-filter.outputs.generator == 'true' }}
@@ -549,13 +558,15 @@
GS_TEST_DIR: ${{ github.workspace }}/gstest
run: |
pip3 show networkx
python3 -m pytest --exitfirst -s -v \
$(dirname $(python3 -c "import graphscope; print(graphscope.__file__)"))/nx/generators/tests
python3 -m pytest -d --tx popen//python=python3 \
--exitfirst -s -v \
$(dirname $(python3 -c "import graphscope; print(graphscope.__file__)"))/nx/generators/tests
- name: Readwrite test
if: ${{ needs.changes.outputs.networkx == 'true' && steps.nx-filter.outputs.io == 'true' }}
env:
DEPLOYMENT: ${{ matrix.deployment }}
run: |
python3 -m pytest --exitfirst -s -v -m "not slow" \
$(dirname $(python3 -c "import graphscope; print(graphscope.__file__)"))/nx/readwrite/tests
python3 -m pytest -d --tx popen//python=python3 \
--exitfirst -s -v -m "not slow" \
$(dirname $(python3 -c "import graphscope; print(graphscope.__file__)"))/nx/readwrite/tests
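
Two patterns recur throughout this workflow: pip runs with --no-cache-dir so the runner's disk is not filled by pip's wheel cache, and wheels from the unpacked wheelhouse are installed in a loop with || true so a single incompatible wheel does not abort the job. A hedged sketch of that loop outside CI, assuming the same python/dist/wheelhouse layout as above:

# Sketch (assumption): install every wheel in the wheelhouse, tolerating
# individual failures the same way the workflow does.
for f in python/dist/wheelhouse/*.whl; do
    python3 -m pip install --no-cache-dir "$f" || true
done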
14 changes: 10 additions & 4 deletions Makefile
@@ -41,6 +41,9 @@ ARCH := $(shell uname -m)

VERSION := $(shell cat $(WORKING_DIR)/VERSION)

# pip installation arguments
PIP_ARGS = --timeout=1000 --no-cache-dir

## Common
.PHONY: all graphscope install clean

@@ -75,7 +78,8 @@ clean:

client: learning
cd $(CLIENT_DIR) && \
python3 -m pip install -r requirements.txt -r requirements-dev.txt --user && \
python3 -m pip install ${PIP_ARGS} "torch" --index-url https://download.pytorch.org/whl/cpu --user && \
python3 -m pip install ${PIP_ARGS} -r requirements.txt -r requirements-dev.txt --user && \
export PATH=$(PATH):$(HOME)/.local/bin && \
python3 setup.py build_ext --inplace --user && \
if [ $(WITH_GLTORCH) = ON ]; then \
@@ -86,7 +90,8 @@ client: learning

coordinator: client
cd $(COORDINATOR_DIR) && \
python3 -m pip install -r requirements.txt -r requirements-dev.txt --user && \
python3 -m pip install ${PIP_ARGS} "torch" --index-url https://download.pytorch.org/whl/cpu --user && \
python3 -m pip install ${PIP_ARGS} -r requirements.txt -r requirements-dev.txt --user && \
python3 setup.py build_builtin && \
python3 -m pip install --user --editable $(COORDINATOR_DIR) && \
rm -rf $(COORDINATOR_DIR)/*.egg-info
@@ -169,8 +174,9 @@ $(LEARNING_DIR)/graphlearn/built/lib/libgraphlearn_shared.$(SUFFIX):

prepare-client:
cd $(CLIENT_DIR) && \
pip3 install -r requirements.txt --user && \
pip3 install -r requirements-dev.txt --user && \
pip3 install ${PIP_ARGS} "torch" --index-url https://download.pytorch.org/whl/cpu --user && \
pip3 install ${PIP_ARGS} -r requirements.txt --user && \
pip3 install ${PIP_ARGS} -r requirements-dev.txt --user && \
python3 setup.py build_proto

graphscope-docs: prepare-client
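Because PIP_ARGS is an ordinary make variable, it can also be overridden per invocation without editing the Makefile. A hedged usage example, assuming the client target shown above:

# Sketch (assumption): raise the pip timeout for a slow mirror while
# keeping the cache disabled.
make client PIP_ARGS="--timeout=3000 --no-cache-dir"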
Diffs for the remaining changed files in this commit are not shown.
