Skip to content

Commit

Permalink
Merge pull request #237 from phonopy/numpy2
Browse files Browse the repository at this point in the history
Update for numpy 2.0
  • Loading branch information
atztogo authored Jun 19, 2024
2 parents 32cab8a + 935e65b commit 97a0305
Show file tree
Hide file tree
Showing 10 changed files with 80 additions and 29 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/phono3py-pytest-conda-mkl-phphmtblas.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,9 @@ jobs:
python-version: ["3.12"]

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
# Use conda-incubator/setup-miniconda for precise control of conda infrastructure
- uses: conda-incubator/setup-miniconda@v2
- uses: conda-incubator/setup-miniconda@v3
with:
miniforge-version: latest
- name: Install dependent packages
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/phono3py-pytest-conda-mkl-v2.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,9 @@ jobs:
python-version: ["3.12"]

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
# Use conda-incubator/setup-miniconda for precise control of conda infrastructure
- uses: conda-incubator/setup-miniconda@v2
- uses: conda-incubator/setup-miniconda@v3
with:
miniforge-version: latest
- name: Install dependent packages
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/phono3py-pytest-conda-mkl.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,9 @@ jobs:
python-version: ["3.12"]

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
# Use conda-incubator/setup-miniconda for precise control of conda infrastructure
- uses: conda-incubator/setup-miniconda@v2
- uses: conda-incubator/setup-miniconda@v3
with:
miniforge-version: latest
- name: Install dependent packages
Expand Down
52 changes: 52 additions & 0 deletions .github/workflows/phono3py-pytest-conda-numpy2.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
# CI workflow: run the phono3py test suite against numpy 2.0 with OpenBLAS.
# Added in PR #237 to verify numpy 2 compatibility; spglib and phonopy are
# installed from their develop branches so the whole stack is tested together.
name: Pytest with openblas and numpy 2.0

on:
  pull_request:
    branches: [ develop ]

jobs:
  build-linux:
    runs-on: ubuntu-latest
    defaults:
      run:
        # Login shell so that `conda activate` works inside run steps.
        shell: bash -l {0}
    strategy:
      matrix:
        # Quoted to avoid YAML float coercion (3.12 would stay a string anyway,
        # but quoting version matrices is the safe convention).
        python-version: ["3.12"]

    steps:
      - uses: actions/checkout@v4
      # Use conda-incubator/setup-miniconda for precise control of conda infrastructure
      - uses: conda-incubator/setup-miniconda@v3
        with:
          miniforge-version: latest
      - name: Install dependent packages
        run: |
          conda activate test
          conda install --yes python=${{ matrix.python-version }}
          conda install --yes matplotlib-base pyyaml "libblas=*=*openblas" openblas h5py "numpy=2" scipy pytest codecov pytest-cov cmake c-compiler
      - name: Install spglib develop branch
        run: |
          conda activate test
          git clone --depth 1 https://github.com/spglib/spglib.git
          cd spglib
          pip install -e . -vvv
          cd ..
      - name: Install phonopy develop branch
        run: |
          conda activate test
          git clone --depth 1 https://github.com/phonopy/phonopy.git
          cd phonopy
          PHONOPY_USE_OPENMP=true pip install -e . -vvv
          cd ..
      - name: Install phono3py
        run: |
          conda activate test
          pip install -e . -vvv
      - name: Run pytest
        run: |
          pytest -v test
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3
        with:
          verbose: true
4 changes: 2 additions & 2 deletions .github/workflows/phono3py-pytest-conda-phphmtblas.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,9 @@ jobs:
python-version: ["3.12"]

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
# Use conda-incubator/setup-miniconda for precise control of conda infrastructure
- uses: conda-incubator/setup-miniconda@v2
- uses: conda-incubator/setup-miniconda@v3
with:
miniforge-version: latest
- name: Install dependent packages
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/phono3py-pytest-conda.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,9 @@ jobs:
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
# Use conda-incubator/setup-miniconda for precise control of conda infrastructure
- uses: conda-incubator/setup-miniconda@v2
- uses: conda-incubator/setup-miniconda@v3
with:
miniforge-version: latest
- name: Install dependent packages
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/publish-gh-pages.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,8 @@ jobs:
python-version: ["3.11"]

steps:
- uses: actions/checkout@v2
- uses: conda-incubator/setup-miniconda@v2
- uses: actions/checkout@v4
- uses: conda-incubator/setup-miniconda@v3
with:
auto-update-conda: true
channels: conda-forge
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/publish-to-test-pypi.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,9 @@ jobs:
python-version: ["3.10", ]

steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
# Use conda-incubator/setup-miniconda for precise control of conda infrastructure
- uses: conda-incubator/setup-miniconda@v2
- uses: conda-incubator/setup-miniconda@v3
with:
auto-update-conda: true
channels: conda-forge
Expand Down
28 changes: 14 additions & 14 deletions phono3py/file_IO.py
Original file line number Diff line number Diff line change
Expand Up @@ -280,7 +280,7 @@ def write_fc3_to_hdf5(fc3, filename="fc3.hdf5", p2s_map=None, compression="gzip"
"""
with h5py.File(filename, "w") as w:
w.create_dataset("version", data=np.string_(__version__))
w.create_dataset("version", data=np.bytes_(__version__))
w.create_dataset("fc3", data=fc3, compression=compression)
if p2s_map is not None:
w.create_dataset("p2s_map", data=p2s_map)
Expand Down Expand Up @@ -352,9 +352,9 @@ def write_force_constants_to_hdf5(
dset = w.create_dataset(
"physical_unit", (1,), dtype="S%d" % len(physical_unit)
)
dset[0] = np.string_(physical_unit)
dset[0] = np.bytes_(physical_unit)
if version is not None:
w.create_dataset("version", data=np.string_(version))
w.create_dataset("version", data=np.bytes_(version))

write_force_constants_to_hdf5(
force_constants,
Expand Down Expand Up @@ -385,7 +385,7 @@ def write_grid_address_to_hdf5(
suffix = _get_filename_suffix(mesh, filename=filename)
full_filename = "grid_address" + suffix + ".hdf5"
with h5py.File(full_filename, "w") as w:
w.create_dataset("version", data=np.string_(__version__))
w.create_dataset("version", data=np.bytes_(__version__))
w.create_dataset("mesh", data=mesh)
if bz_grid is not None and bz_grid.grid_matrix is not None:
w.create_dataset("grid_matrix", data=bz_grid.grid_matrix)
Expand Down Expand Up @@ -577,7 +577,7 @@ def write_real_self_energy_to_hdf5(
full_filename += ".hdf5"

with h5py.File(full_filename, "w") as w:
w.create_dataset("version", data=np.string_(__version__))
w.create_dataset("version", data=np.bytes_(__version__))
w.create_dataset("grid_point", data=grid_point)
w.create_dataset("mesh", data=mesh)
if bz_grid is not None and bz_grid.grid_matrix is not None:
Expand Down Expand Up @@ -666,7 +666,7 @@ def write_spectral_function_to_hdf5(
full_filename += ".hdf5"

with h5py.File(full_filename, "w") as w:
w.create_dataset("version", data=np.string_(__version__))
w.create_dataset("version", data=np.bytes_(__version__))
w.create_dataset("grid_point", data=grid_point)
w.create_dataset("mesh", data=mesh)
if bz_grid is not None and bz_grid.grid_matrix is not None:
Expand Down Expand Up @@ -713,7 +713,7 @@ def write_collision_to_hdf5(
)
full_filename = "collision" + suffix + ".hdf5"
with h5py.File(full_filename, "w") as w:
w.create_dataset("version", data=np.string_(__version__))
w.create_dataset("version", data=np.bytes_(__version__))
w.create_dataset("temperature", data=temperature)
if gamma is not None:
w.create_dataset("gamma", data=gamma)
Expand Down Expand Up @@ -751,7 +751,7 @@ def write_collision_to_hdf5(
def write_full_collision_matrix(collision_matrix, filename="fcm.hdf5"):
"""Write full (non-symmetrized) collision matrix to collision-*.hdf5."""
with h5py.File(filename, "w") as w:
w.create_dataset("version", data=np.string_(__version__))
w.create_dataset("version", data=np.bytes_(__version__))
w.create_dataset("collision_matrix", data=collision_matrix)


Expand All @@ -776,7 +776,7 @@ def write_unitary_matrix_to_hdf5(
)
hdf5_filename = "unitary" + suffix + ".hdf5"
with h5py.File(hdf5_filename, "w") as w:
w.create_dataset("version", data=np.string_(__version__))
w.create_dataset("version", data=np.bytes_(__version__))
w.create_dataset("temperature", data=temperature)
if unitary_matrix is not None:
w.create_dataset("unitary_matrix", data=unitary_matrix)
Expand Down Expand Up @@ -816,7 +816,7 @@ def write_collision_eigenvalues_to_hdf5(
mesh, sigma=sigma, sigma_cutoff=sigma_cutoff, filename=filename
)
with h5py.File("coleigs" + suffix + ".hdf5", "w") as w:
w.create_dataset("version", data=np.string_(__version__))
w.create_dataset("version", data=np.bytes_(__version__))
w.create_dataset("temperature", data=temperatures)
w.create_dataset("collision_eigenvalues", data=collision_eigenvalues)
w.close()
Expand Down Expand Up @@ -885,7 +885,7 @@ def write_kappa_to_hdf5(
)
full_filename = "kappa" + suffix + ".hdf5"
with h5py.File(full_filename, "w") as w:
w.create_dataset("version", data=np.string_(__version__))
w.create_dataset("version", data=np.bytes_(__version__))
w.create_dataset("temperature", data=temperature)
w.create_dataset("mesh", data=mesh)
if bz_grid is not None and bz_grid.grid_matrix is not None:
Expand Down Expand Up @@ -1159,7 +1159,7 @@ def write_pp_to_hdf5(
full_filename = "pp" + suffix + ".hdf5"

with h5py.File(full_filename, "w") as w:
w.create_dataset("version", data=np.string_(__version__))
w.create_dataset("version", data=np.bytes_(__version__))
if pp is not None:
if g_zero is None:
w.create_dataset("pp", data=pp, compression=compression)
Expand Down Expand Up @@ -1323,7 +1323,7 @@ def write_gamma_detail_to_hdf5(
full_filename = "gamma_detail" + suffix + ".hdf5"

with h5py.File(full_filename, "w") as w:
w.create_dataset("version", data=np.string_(__version__))
w.create_dataset("version", data=np.bytes_(__version__))
w.create_dataset("temperature", data=temperature)
w.create_dataset("mesh", data=mesh)
if bz_grid is not None and bz_grid.grid_matrix is not None:
Expand Down Expand Up @@ -1397,7 +1397,7 @@ def write_phonon_to_hdf5(
full_filename = "phonon" + suffix + ".hdf5"

with h5py.File(full_filename, "w") as w:
w.create_dataset("version", data=np.string_(__version__))
w.create_dataset("version", data=np.bytes_(__version__))
w.create_dataset("mesh", data=mesh)
if bz_grid is not None and bz_grid.grid_matrix is not None:
w.create_dataset("grid_matrix", data=bz_grid.grid_matrix)
Expand Down
1 change: 0 additions & 1 deletion test/phonon/test_grid.py
Original file line number Diff line number Diff line change
Expand Up @@ -2470,7 +2470,6 @@ def test_GridMatrix_with_supercell_symmetry(ph_nacl: Phonopy):
np.testing.assert_array_equal(gm.D_diag, [4, 4, 4])


@pytest.mark.filterwarnings("error")
def test_GridMatrix_with_supercell_symmetry_grg_false(ph_nacl: Phonopy):
"""Test of GridMatrix with supercell symmetry.
Expand Down

0 comments on commit 97a0305

Please sign in to comment.