diff --git a/conda-lock.yml b/conda-lock.yml
index df14a1f..cdcd564 100644
--- a/conda-lock.yml
+++ b/conda-lock.yml
@@ -5,7 +5,7 @@
 # available, unless you explicitly update the lock file.
 #
 # Install this environment as "YOURENV" with:
-# conda-lock install -n YOURENV --file conda-lock.yml
+# conda-lock install -n YOURENV conda-lock.yml
 # To update a single package to the latest version compatible with the version constraints in the source:
 # conda-lock lock --lockfile conda-lock.yml --update PACKAGE
 # To re-solve the entire environment, e.g. after changing a version constraint in the source file:
@@ -13,7 +13,7 @@
 version: 1
 metadata:
   content_hash:
-    linux-64: b3bc921f3b2ec6c9fc6b9f5479fa5d65e011bd6f00f3f2c719e1f507a9c774fa
+    linux-64: 0f28bf119e9aecbe59da3e974a43c9f6041a0e6c062afdec626ab674018f0315
   channels:
   - url: conda-forge
     used_env_vars: []
@@ -33,4876 +33,514 @@ package:
     sha256: fe51de6107f9edc7aa4f786a70f4a883943bc9d39b3bb7307c04c41410990726
   category: main
   optional: false
-- name: ca-certificates
-  version: 2023.7.22
-  manager: conda
-  platform: linux-64
-  dependencies: {}
-  url: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda
-  hash:
-    md5: a73ecd2988327ad4c8f2c331482917f2
-    sha256: 525b7b6b5135b952ec1808de84e5eca57c7c7ff144e29ef3e96ae4040ff432c1
-  category: main
-  optional: false
-- name: conda-ecosystem-user-package-isolation
-  version: '1.0'
-  manager: conda
-  platform: linux-64
-  dependencies: {}
-  url: https://conda.anaconda.org/conda-forge/linux-64/conda-ecosystem-user-package-isolation-1.0-ha770c72_1.tar.bz2
-  hash:
-    md5: ae754334312bf9346a0f75d1d21bc24f
-    sha256: 0692b32ffc84f4b98f05b700693ae40607af9431c56ddcc42bd130238723bbec
-  category: main
-  optional: false
-- name: fenics-ufcx
-  version: 0.6.0
-  manager: conda
-  platform: linux-64
-  dependencies: {}
-  url: https://conda.anaconda.org/conda-forge/noarch/fenics-ufcx-0.6.0-h56297ac_0.conda
-  hash:
-    md5: 6424710417c308e1e5a711de04e7b4cb
-    sha256: d3abe7a32493d2c69f86b18961837a629156fcbb38bb8509da6479f3e8d689f0
-  category: main
-  optional: false
-- name: font-ttf-dejavu-sans-mono
-  version: '2.37'
-  manager: conda
-  platform: linux-64
-  dependencies: {}
-  url: https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2
-  hash:
-    md5: 0c96522c6bdaed4b1566d11387caaf45
-    sha256: 58d7f40d2940dd0a8aa28651239adbf5613254df0f75789919c4e6762054403b
-  category: main
-  optional: false
-- name: font-ttf-inconsolata
-  version: '3.000'
-  manager: conda
-  platform: linux-64
-  dependencies: {}
-  url: https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2
-  hash:
-    md5: 34893075a5c9e55cdafac56607368fc6
-    sha256: c52a29fdac682c20d252facc50f01e7c2e7ceac52aa9817aaf0bb83f7559ec5c
-  category: main
-  optional: false
-- name: font-ttf-source-code-pro
-  version: '2.038'
-  manager: conda
-  platform: linux-64
-  dependencies: {}
-  url: https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2
-  hash:
-    md5: 4d59c254e01d9cde7957100457e2d5fb
-    sha256: 00925c8c055a2275614b4d983e1df637245e19058d79fc7dd1a93b8d9fb4b139
-  category: main
-  optional: false
-- name: font-ttf-ubuntu
-  version: '0.83'
-  manager: conda
-  platform: linux-64
-  dependencies: {}
-  url: https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2
-  hash:
-    md5: 19410c3df09dfb12d1206132a1d357c5
-    sha256: 470d5db54102bd51dbb0c5990324a2f4a0bc976faa493b22193338adb9882e2e
-  category: main
-  optional: false
--
name: kernel-headers_linux-64 - version: 2.6.32 - manager: conda - platform: linux-64 - dependencies: {} - url: https://conda.anaconda.org/conda-forge/noarch/kernel-headers_linux-64-2.6.32-he073ed8_16.conda - hash: - md5: 7ca122655873935e02c91279c5b03c8c - sha256: aaa8aa6dc776d734a6702032588ff3c496721da905366d91162e3654c082aef0 - category: main - optional: false -- name: ld_impl_linux-64 - version: '2.40' - manager: conda - platform: linux-64 - dependencies: {} - url: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda - hash: - md5: 7aca3059a1729aa76c597603f10b0dd3 - sha256: f6cc89d887555912d6c61b295d398cff9ec982a3417d38025c45d5dd9b9e79cd - category: main - optional: false -- name: libboost-headers - version: 1.82.0 - manager: conda - platform: linux-64 - dependencies: {} - url: https://conda.anaconda.org/conda-forge/linux-64/libboost-headers-1.82.0-ha770c72_6.conda - hash: - md5: a943dcb8fd22cf23ce901ac84f6538c2 - sha256: c996950b85808115ea833e577a0af2969dbb0378c299560c2b945401a7770823 - category: main - optional: false -- name: libgcc-devel_linux-64 - version: 12.3.0 - manager: conda - platform: linux-64 - dependencies: {} - url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-devel_linux-64-12.3.0-h8bca6fd_2.conda - hash: - md5: ed613582de7b8569fdc53ca141be176a - sha256: 7e12d0496389017ca526254913b24d9024e1728c849a0d6476a4b7fde9d03cba - category: main - optional: false -- name: libstdcxx-devel_linux-64 - version: 12.3.0 - manager: conda - platform: linux-64 - dependencies: {} - url: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-devel_linux-64-12.3.0-h8bca6fd_2.conda - hash: - md5: 7268a17e56eb099d1b8869bbbf46de4c - sha256: e8483069599561ef24b884c898442eadc510190f978fa388db3281b10c3c084e - category: main - optional: false -- name: libstdcxx-ng - version: 13.2.0 - manager: conda - platform: linux-64 - dependencies: {} - url: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_2.conda - hash: - md5: 9172c297304f2a20134fc56c97fbe229 - sha256: ab22ecdc974cdbe148874ea876d9c564294d5eafa760f403ed4fd495307b4243 - category: main - optional: false -- name: mpi - version: '1.0' - manager: conda - platform: linux-64 - dependencies: {} - url: https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-openmpi.tar.bz2 - hash: - md5: 1dcc49e16749ff79ba2194fa5d4ca5e7 - sha256: 54cf44ee2c122bce206f834a825af06e3b14fc4fd58c968ae9329715cc281d1e - category: main - optional: false -- name: mumps-include - version: 5.2.1 - manager: conda - platform: linux-64 - dependencies: {} - url: https://conda.anaconda.org/conda-forge/linux-64/mumps-include-5.2.1-ha770c72_11.tar.bz2 - hash: - md5: 765196257c11b54dc52522d2fcafdd69 - sha256: 583922c671eb90e576a96432fe1439bc5eb46f88c4a0a4867fba39653c09c6de - category: main - optional: false -- name: nlohmann_json - version: 3.11.2 - manager: conda - platform: linux-64 - dependencies: {} - url: https://conda.anaconda.org/conda-forge/linux-64/nlohmann_json-3.11.2-h27087fc_0.tar.bz2 - hash: - md5: b7743cf3f8da023abe95afc215111555 - sha256: 55ac71e0431267b30b3bc9ea0238d1b9dc69644938d213511749c71b91506a7b - category: main - optional: false -- name: pybind11-abi - version: '4' - manager: conda - platform: linux-64 - dependencies: {} - url: https://conda.anaconda.org/conda-forge/noarch/pybind11-abi-4-hd8ed1ab_3.tar.bz2 - hash: - md5: 878f923dd6acc8aeb47a75da6c4098be - sha256: d4fb485b79b11042a16dc6abfb0c44c4f557707c2653ac47c81e5d32b24a3bb0 - category: main - optional: false -- name: python_abi - version: 
'3.10' - manager: conda - platform: linux-64 - dependencies: {} - url: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.10-4_cp310.conda - hash: - md5: 26322ec5d7712c3ded99dd656142b8ce - sha256: 456bec815bfc2b364763084d08b412fdc4c17eb9ccc66a36cb775fa7ac3cbaec - category: main - optional: false -- name: tzdata - version: 2023c - manager: conda - platform: linux-64 - dependencies: {} - url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda - hash: - md5: 939e3e74d8be4dac89ce83b20de2492a - sha256: 0449138224adfa125b220154408419ec37c06b0b49f63c5954724325903ecf55 - category: main - optional: false -- name: utfcpp - version: 3.2.5 - manager: conda - platform: linux-64 - dependencies: {} - url: https://conda.anaconda.org/conda-forge/linux-64/utfcpp-3.2.5-ha770c72_0.conda - hash: - md5: b3599de486d88c2c447501cd2b053a99 - sha256: 91c89693a4bd7f9aec874528506ba08dfc6c9acc3aec88dfb315dad6adba0c8b - category: main - optional: false -- name: fonts-conda-forge - version: '1' - manager: conda - platform: linux-64 - dependencies: - font-ttf-dejavu-sans-mono: '' - font-ttf-inconsolata: '' - font-ttf-source-code-pro: '' - font-ttf-ubuntu: '' - url: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2 - hash: - md5: f766549260d6815b0c52253f1fb1bb29 - sha256: 53f23a3319466053818540bcdf2091f253cbdbab1e0e9ae7b9e509dcaa2a5e38 - category: main - optional: false -- name: libgomp - version: 13.2.0 - manager: conda - platform: linux-64 - dependencies: - _libgcc_mutex: '0.1' - url: https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_2.conda - hash: - md5: e2042154faafe61969556f28bade94b9 - sha256: e1e82348f8296abfe344162b3b5f0ddc2f504759ebeb8b337ba99beaae583b15 - category: main - optional: false -- name: sysroot_linux-64 - version: '2.12' - manager: conda - platform: linux-64 - dependencies: - kernel-headers_linux-64: 2.6.32 - url: https://conda.anaconda.org/conda-forge/noarch/sysroot_linux-64-2.12-he073ed8_16.conda - hash: - md5: 071ea8dceff4d30ac511f4a2f8437cd1 - sha256: 4c024b2eee24c6da7d3e08723111ec02665c578844c5b3e9e6b38f89000bec41 - category: main - optional: false -- name: _openmp_mutex - version: '4.5' - manager: conda - platform: linux-64 - dependencies: - _libgcc_mutex: '0.1' - libgomp: '>=7.5.0' - url: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 - hash: - md5: 73aaf86a425cc6e73fcf236a5a46396d - sha256: fbe2c5e56a653bebb982eda4876a9178aedfc2b545f25d0ce9c4c0b508253d22 - category: main - optional: false -- name: binutils_impl_linux-64 - version: '2.40' - manager: conda - platform: linux-64 - dependencies: - ld_impl_linux-64: '2.40' - sysroot_linux-64: '' - url: https://conda.anaconda.org/conda-forge/linux-64/binutils_impl_linux-64-2.40-hf600244_0.conda - hash: - md5: 33084421a8c0af6aef1b439707f7662a - sha256: a7e0ea2b71a5b03d82e5a58fb6b612ab1c44d72ce161f9aa441f7ba467cd4c8d - category: main - optional: false -- name: fonts-conda-ecosystem - version: '1' - manager: conda - platform: linux-64 - dependencies: - fonts-conda-forge: '' - url: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2 - hash: - md5: fee5683a3f04bd15cbd8318b096a27ab - sha256: a997f2f1921bb9c9d76e6fa2f6b408b7fa549edd349a77639c9fe7a23ea93e61 - category: main - optional: false -- name: binutils_linux-64 - version: '2.40' - manager: conda - platform: linux-64 - dependencies: - binutils_impl_linux-64: 2.40.* - sysroot_linux-64: '' - url: 
https://conda.anaconda.org/conda-forge/linux-64/binutils_linux-64-2.40-hbdbef99_2.conda - hash: - md5: adfebae9fdc63a598495dfe3b006973a - sha256: 333f3339d94c93bcc02a723e3e460cb6ff6075e05f5247e15bef5dcdcec541a3 - category: main - optional: false -- name: libgcc-ng - version: 13.2.0 - manager: conda - platform: linux-64 - dependencies: - _libgcc_mutex: '0.1' - _openmp_mutex: '>=4.5' - url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_2.conda - hash: - md5: c28003b0be0494f9a7664389146716ff - sha256: d361d3c87c376642b99c1fc25cddec4b9905d3d9b9203c1c545b8c8c1b04539a - category: main - optional: false -- name: alsa-lib - version: 1.2.10 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.10-hd590300_0.conda - hash: - md5: 75dae9a4201732aa78a530b826ee5fe0 - sha256: 51147922bad9d3176e780eb26f748f380cd3184896a9f9125d8ac64fe330158b - category: main - optional: false -- name: aom - version: 3.6.1 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/aom-3.6.1-h59595ed_0.conda - hash: - md5: 8457db6d1175ee86c8e077f6ac60ff55 - sha256: 006d10fe845374e71fb15a6c1f58ae4b3efef69be02b0992265abfb5c4c2e026 - category: main - optional: false -- name: attr - version: 2.5.1 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2 - hash: - md5: d9c69a24ad678ffce24c6543a0176b00 - sha256: 82c13b1772c21fc4a17441734de471d3aabf82b61db9b11f4a1bd04a9c4ac324 - category: main - optional: false -- name: bzip2 - version: 1.0.8 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=9.3.0' - url: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2 - hash: - md5: a1fd65c7ccbf10880423d82bca54eb54 - sha256: cb521319804640ff2ad6a9f118d972ed76d86bea44e5626c09a13d38f562e1fa - category: main - optional: false -- name: c-ares - version: 1.20.1 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.20.1-hd590300_1.conda - hash: - md5: 2facbaf5ee1a56967aecaee89799160e - sha256: 1700d9ebfd3b21c8b50e12a502f26e015719e1f3dbb5d491b5be061cf148ca7a - category: main - optional: false -- name: dav1d - version: 1.2.1 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/dav1d-1.2.1-hd590300_0.conda - hash: - md5: 418c6ca5929a611cbd69204907a83995 - sha256: 22053a5842ca8ee1cf8e1a817138cdb5e647eb2c46979f84153f6ad7bde73020 - category: main - optional: false -- name: double-conversion - version: 3.3.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/double-conversion-3.3.0-h59595ed_0.conda - hash: - md5: c2f83a5ddadadcdb08fe05863295ee97 - sha256: 9eee491a73b67fd64379cf715f85f8681568ebc1f02f9e11b4c50d46a3323544 - category: main - optional: false -- name: eigen - version: 3.4.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/eigen-3.4.0-h00ab1b0_0.conda - hash: - md5: b1b879d6d093f55dd40d58b5eb2f0699 - sha256: 53b15a98aadbe0704479bacaf7a5618fcb32d1577be320630674574241639b34 - category: main - optional: false -- name: fribidi - version: 
1.0.10 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=7.5.0' - url: https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2 - hash: - md5: ac7bc6a654f8f41b352b38f4051135f8 - sha256: 5d7b6c0ee7743ba41399e9e05a58ccc1cfc903942e49ff6f677f6e423ea7a627 - category: main - optional: false -- name: gettext - version: 0.21.1 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2 - hash: - md5: 14947d8770185e5153fdd04d4673ed37 - sha256: 4fcfedc44e4c9a053f0416f9fc6ab6ed50644fca3a761126dbd00d09db1f546a - category: main - optional: false -- name: gmp - version: 6.2.1 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=7.5.0' - libstdcxx-ng: '>=7.5.0' - url: https://conda.anaconda.org/conda-forge/linux-64/gmp-6.2.1-h58526e2_0.tar.bz2 - hash: - md5: b94cf2db16066b242ebd26db2facbd56 - sha256: 07a5319e1ac54fe5d38f50c60f7485af7f830b036da56957d0bfb7558a886198 - category: main - optional: false -- name: graphite2 - version: 1.3.13 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=7.5.0' - libstdcxx-ng: '>=7.5.0' - url: https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2 - hash: - md5: 8c54672728e8ec6aa6db90cf2806d220 - sha256: 65da967f3101b737b08222de6a6a14e20e480e7d523a5d1e19ace7b960b5d6b1 - category: main - optional: false -- name: icu - version: '73.2' - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/icu-73.2-h59595ed_0.conda - hash: - md5: cc47e1facc155f91abd89b11e48e72ff - sha256: e12fd90ef6601da2875ebc432452590bc82a893041473bc1c13ef29001a73ea8 - category: main - optional: false -- name: jsoncpp - version: 1.9.5 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=9.4.0' - libstdcxx-ng: '>=9.4.0' - url: https://conda.anaconda.org/conda-forge/linux-64/jsoncpp-1.9.5-h4bd325d_1.tar.bz2 - hash: - md5: ae7f50dd1e78c7e78b5d2cf7062e559d - sha256: 7a5a6cdfc17849bb8000cc31b91c22f1fe0e087dfc3fd59ecc4d3b64cf0ad772 - category: main - optional: false -- name: jxrlib - version: '1.1' - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=9.3.0' - url: https://conda.anaconda.org/conda-forge/linux-64/jxrlib-1.1-h7f98852_2.tar.bz2 - hash: - md5: 8e787b08fe19986d99d034b839df2961 - sha256: 3ffc19c2ca272e6d5b8edc7cfc5bb71763dfdfa1810dd4b8820cc6b212ecbd95 - category: main - optional: false -- name: keyutils - version: 1.6.1 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=10.3.0' - url: https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2 - hash: - md5: 30186d27e2c9fa62b45fb1476b7200e3 - sha256: 150c05a6e538610ca7c43beb3a40d65c90537497a4f6a5f4d15ec0451b6f5ebb - category: main - optional: false -- name: lame - version: '3.100' - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2 - hash: - md5: a8832b479f93521a9e7b5b743803be51 - sha256: aad2a703b9d7b038c0f745b853c6bb5f122988fe1a7a096e0e606d9cbec4eaab - category: main - optional: false -- name: lerc - version: 4.0.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2 - hash: - md5: 76bbff344f0134279f225174e9064c8f - sha256: 
cb55f36dcd898203927133280ae1dc643368af041a48bcf7c026acb7c47b0c12 - category: main - optional: false -- name: libaec - version: 1.1.2 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.2-h59595ed_1.conda - hash: - md5: 127b0be54c1c90760d7fe02ea7a56426 - sha256: fdde15e74dc099ab1083823ec0f615958e53d9a8fae10405af977de251668bea - category: main - optional: false -- name: libbrotlicommon - version: 1.1.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_1.conda - hash: - md5: aec6c91c7371c26392a06708a73c70e5 - sha256: 40f29d1fab92c847b083739af86ad2f36d8154008cf99b64194e4705a1725d78 - category: main - optional: false -- name: libdeflate - version: '1.19' - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda - hash: - md5: 1635570038840ee3f9c71d22aa5b8b6d - sha256: 985ad27aa0ba7aad82afa88a8ede6a1aacb0aaca950d710f15d85360451e72fd - category: main - optional: false -- name: libev - version: '4.33' - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=7.5.0' - url: https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2 - hash: - md5: 6f8720dff19e17ce5d48cfe7f3d2f0a3 - sha256: 8c9635aa0ea28922877dc96358f9547f6a55fc7e2eb75a556b05f1725496baf9 - category: main - optional: false -- name: libexpat - version: 2.5.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda - hash: - md5: 6305a3dd2752c76335295da4e581f2fd - sha256: 74c98a563777ae2ad71f1f74d458a8ab043cee4a513467c159ccf159d0e461f3 - category: main - optional: false -- name: libffi - version: 3.4.2 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=9.4.0' - url: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 - hash: - md5: d645c6d2ac96843a2bfaccd2d62b3ac3 - sha256: ab6e9856c21709b7b517e940ae7028ae0737546122f83c2aa5d692860c3b149e - category: main - optional: false -- name: libgfortran5 - version: 13.2.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=13.2.0' - url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_2.conda - hash: - md5: 78fdab09d9138851dde2b5fe2a11019e - sha256: 55ecf5c46c05a98b4822a041d6e1cb196a7b0606126eb96b24131b7d2c8ca561 - category: main - optional: false -- name: libiconv - version: '1.17' - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=10.3.0' - url: https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2 - hash: - md5: b62b52da46c39ee2bc3c162ac7f1804d - sha256: 6a81ebac9f1aacdf2b4f945c87ad62b972f0f69c8e0981d68e111739e6720fd7 - category: main - optional: false -- name: libjpeg-turbo - version: 2.1.5.1 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-hd590300_1.conda - hash: - md5: 323e90742f0f48fc22bea908735f55e6 - sha256: 0ef7378818c6d5b407692d02556c32e2f6af31c7542bca5160d0b92a59427fb5 - category: main - optional: false -- name: libnsl - version: 2.0.1 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda - 
hash: - md5: 30fd6e37fe21f86f4bd26d6ee73eeec7 - sha256: 26d77a3bb4dceeedc2a41bd688564fe71bf2d149fdcf117049970bc02ff1add6 - category: main - optional: false -- name: libogg - version: 1.3.4 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=9.3.0' - url: https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2 - hash: - md5: 6e8cc2173440d77708196c5b93771680 - sha256: b88afeb30620b11bed54dac4295aa57252321446ba4e6babd7dce4b9ffde9b25 - category: main - optional: false -- name: libopus - version: 1.3.1 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=9.3.0' - url: https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2 - hash: - md5: 15345e56d527b330e1cacbdf58676e8f - sha256: 0e1c2740ebd1c93226dc5387461bbcf8142c518f2092f3ea7551f77755decc8f - category: main - optional: false -- name: libpciaccess - version: '0.17' - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.17-h166bdaf_0.tar.bz2 - hash: - md5: b7463391cf284065294e2941dd41ab95 - sha256: 9fe4aaf5629b4848d9407b9ed4da941ba7e5cebada63ee0becb9aa82259dc6e2 - category: main - optional: false -- name: libsanitizer - version: 12.3.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12.3.0' - url: https://conda.anaconda.org/conda-forge/linux-64/libsanitizer-12.3.0-h0f45ef3_2.conda - hash: - md5: 4655db64eca78a6fcc4fb654fc1f8d57 - sha256: a58add0b4477c59aee324b508d834267360b659f9c543f551ca4442196e656fe - category: main - optional: false -- name: libsodium - version: 1.0.18 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=7.5.0' - url: https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.18-h36c2ea0_1.tar.bz2 - hash: - md5: c3788462a6fbddafdb413a9f9053e58d - sha256: 53da0c8b79659df7b53eebdb80783503ce72fb4b10ed6e9e05cc0e9e4207a130 - category: main - optional: false -- name: libtasn1 - version: 4.19.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/libtasn1-4.19.0-h166bdaf_0.tar.bz2 - hash: - md5: 93840744a8552e9ebf6bb1a5dffc125a - sha256: 5bfeada0e1c6ec2574afe2d17cdbc39994d693a41431338a6cb9dfa7c4d7bfc8 - category: main - optional: false -- name: libunistring - version: 0.9.10 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=9.3.0' - url: https://conda.anaconda.org/conda-forge/linux-64/libunistring-0.9.10-h7f98852_0.tar.bz2 - hash: - md5: 7245a044b4a1980ed83196176b78b73a - sha256: e88c45505921db29c08df3439ddb7f771bbff35f95e7d3103bf365d5d6ce2a6d - category: main - optional: false -- name: libuuid - version: 2.38.1 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda - hash: - md5: 40b61aab5c7ba9ff276c41cfffe6b80b - sha256: 787eb542f055a2b3de553614b25f09eefb0a0931b0c87dbcce6efdfd92f04f18 - category: main - optional: false -- name: libvpx - version: 1.13.1 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/libvpx-1.13.1-h59595ed_0.conda - hash: - md5: 0974a6d3432e10bae02bcab0cce1b308 - sha256: 8067e73d6e4f82eae158cb86acdc2d1cf18dd7f13807f0b93e13a07ee4c04b79 - category: main - optional: false -- name: libwebp-base - version: 1.3.2 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - url: 
https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.conda - hash: - md5: 30de3fd9b3b602f7473f30e684eeea8c - sha256: 68764a760fa81ef35dacb067fe8ace452bbb41476536a4a147a1051df29525f0 - category: main - optional: false -- name: libzlib - version: 1.2.13 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda - hash: - md5: f36c115f1ee199da648e0597ec2047ad - sha256: 370c7c5893b737596fd6ca0d9190c9715d89d888b8c88537ae1ef168c25e82e4 - category: main - optional: false -- name: lz4-c - version: 1.9.4 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda - hash: - md5: 318b08df404f9c9be5712aaa5a6f0bb0 - sha256: 1b4c105a887f9b2041219d57036f72c4739ab9e9fe5a1486f094e58c76b31f5f - category: main - optional: false -- name: metis - version: 5.1.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/metis-5.1.0-h59595ed_1007.conda - hash: - md5: 40ccb8318df2500f83bd868dd8fcd201 - sha256: 446bf794497284e2ffa28ab9191d70c38d372c51e3fd073f0d8b35efb51e7e02 - category: main - optional: false -- name: mpg123 - version: 1.32.3 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.32.3-h59595ed_0.conda - hash: - md5: bdadff838d5437aea83607ced8b37f75 - sha256: f02b8ed16b3a488938b5f9453d19ea315ce6ed0d46cc389ecfaa28f2a4c3cb16 - category: main - optional: false -- name: ncurses - version: '6.4' - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-hcb278e6_0.conda - hash: - md5: 681105bccc2a3f7f1a837d47d39c9179 - sha256: ccf61e61d58a8a7b2d66822d5568e2dc9387883dd9b2da61e1d787ece4c4979a - category: main - optional: false -- name: nettle - version: 3.8.1 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/nettle-3.8.1-hc379101_1.tar.bz2 - hash: - md5: 3cb2c7df59990bd37c2ce27fd906de68 - sha256: 49c569a69608eee784e815179a70c6ae4d088dac42b7df999044f68058d593bb - category: main - optional: false -- name: nspr - version: '4.35' - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda - hash: - md5: da0ec11a6454ae19bff5b02ed881a2b1 - sha256: 8fadeebb2b7369a4f3b2c039a980d419f65c7b18267ba0c62588f9f894396d0c - category: main - optional: false -- name: openh264 - version: 2.3.1 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/openh264-2.3.1-hcb278e6_2.conda - hash: - md5: 37d01894f256b2a6921c5a218f42f8a2 - sha256: 3be6de15d40f02c9bb34d5095c65b6b3f07e04fc21a0fb63d1885f1a31de5ae2 - category: main - optional: false -- name: openssl - version: 3.1.3 - manager: conda - platform: linux-64 - dependencies: - ca-certificates: '' - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.3-hd590300_0.conda - hash: - md5: 7bb88ce04c8deb9f7d763ae04a1da72f - sha256: f4e35f506c7e8ab7dfdc47255b0d5aa8ce0c99028ae0affafd274333042c4f70 - category: main - optional: false -- name: pixman - version: 0.42.2 - 
manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/pixman-0.42.2-h59595ed_0.conda - hash: - md5: 700edd63ccd5fc66b70b1c028cea9a68 - sha256: ae917851474eb3b08812b02c9e945d040808523ec53f828aa74a90b0cdf15f57 - category: main - optional: false -- name: pkg-config - version: 0.29.2 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=7.5.0' - url: https://conda.anaconda.org/conda-forge/linux-64/pkg-config-0.29.2-h36c2ea0_1008.tar.bz2 - hash: - md5: fbef41ff6a4c8140c30057466a1cdd47 - sha256: 8b35a077ceccdf6888f1e82bd3ea281175014aefdc2d4cf63d7a4c7e169c125c - category: main - optional: false -- name: pthread-stubs - version: '0.4' - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=7.5.0' - url: https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2 - hash: - md5: 22dad4df6e8630e8dff2428f6f6a7036 - sha256: 67c84822f87b641d89df09758da498b2d4558d47b920fd1d3fe6d3a871e000ff - category: main - optional: false -- name: pugixml - version: '1.13' - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/pugixml-1.13-h59595ed_1.conda - hash: - md5: a7c81a1cf43fe01da4954c9a3001396a - sha256: 3390defa1f68cc0adcbd35d05c679d3b9955fa0d9334774c1012e771046d9d7a - category: main - optional: false -- name: rapidjson - version: 1.1.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=7.3.0' - libstdcxx-ng: '>=7.3.0' - url: https://conda.anaconda.org/conda-forge/linux-64/rapidjson-1.1.0-he1b5a44_1002.tar.bz2 - hash: - md5: 37d4fdbb92d573c7d6ab6de74a666dc4 - sha256: 73b74a21dcaafc4a9f43e7f4295ead29d0f3ef13790bad69351942b77294aad8 - category: main - optional: false -- name: snappy - version: 1.1.10 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda - hash: - md5: e6d228cd0bb74a51dd18f5bfce0b4115 - sha256: 02219f2382b4fe39250627dade087a4412d811936a5a445636b7260477164eac - category: main - optional: false -- name: svt-av1 - version: 1.7.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/svt-av1-1.7.0-h59595ed_0.conda - hash: - md5: b6e0b4f1edc2740d1cf87669195c39d4 - sha256: e79878bba3b013db1b59766895a182dd12d2e1a45e24c01b61b4e922ed8500b6 - category: main - optional: false -- name: x264 - version: 1!164.3095 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/x264-1!164.3095-h166bdaf_2.tar.bz2 - hash: - md5: 6c99772d483f566d59e25037fea2c4b1 - sha256: 175315eb3d6ea1f64a6ce470be00fa2ee59980108f246d3072ab8b977cb048a5 - category: main - optional: false -- name: x265 - version: '3.5' - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=10.3.0' - libstdcxx-ng: '>=10.3.0' - url: https://conda.anaconda.org/conda-forge/linux-64/x265-3.5-h924138e_3.tar.bz2 - hash: - md5: e7f6ed84d4623d52ee581325c1587a6b - sha256: 76c7405bcf2af639971150f342550484efac18219c0203c5ee2e38b8956fe2a0 - category: main - optional: false -- name: xorg-kbproto - version: 1.0.7 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=9.3.0' - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2 - hash: - md5: 
4b230e8381279d76131116660f5a241a - sha256: e90b0a6a5d41776f11add74aa030f789faf4efd3875c31964d6f9cfa63a10dd1 - category: main - optional: false -- name: xorg-libice - version: 1.1.1 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hd590300_0.conda - hash: - md5: b462a33c0be1421532f28bfe8f4a7514 - sha256: 5aa9b3682285bb2bf1a8adc064cb63aff76ef9178769740d855abb42b0d24236 - category: main - optional: false -- name: xorg-libxau - version: 1.0.11 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.conda - hash: - md5: 2c80dc38fface310c9bd81b17037fee5 - sha256: 309751371d525ce50af7c87811b435c176915239fc9e132b99a25d5e1703f2d4 - category: main - optional: false -- name: xorg-libxdmcp - version: 1.1.3 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=9.3.0' - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2 - hash: - md5: be93aabceefa2fac576e971aef407908 - sha256: 4df7c5ee11b8686d3453e7f3f4aa20ceef441262b49860733066c52cfd0e4a77 - category: main - optional: false -- name: xorg-renderproto - version: 0.11.1 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=9.3.0' - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2 - hash: - md5: 06feff3d2634e3097ce2fe681474b534 - sha256: 38942930f233d1898594dd9edf4b0c0786f3dbc12065a0c308634c37fd936034 - category: main - optional: false -- name: xorg-xextproto - version: 7.3.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda - hash: - md5: bce9f945da8ad2ae9b1d7165a64d0f87 - sha256: b8dda3b560e8a7830fe23be1c58cc41f407b2e20ae2f3b6901eb5842ba62b743 - category: main - optional: false -- name: xorg-xf86vidmodeproto - version: 2.3.1 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=9.3.0' - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-xf86vidmodeproto-2.3.1-h7f98852_1002.tar.bz2 - hash: - md5: 3ceea9668625c18f19530de98b15d5b0 - sha256: 43398aeacad5b8753b7a1c12cb6bca36124e0c842330372635879c350c430791 - category: main - optional: false -- name: xorg-xproto - version: 7.0.31 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=9.3.0' - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2 - hash: - md5: b4a4381d54784606820704f7b5f05a15 - sha256: f197bb742a17c78234c24605ad1fe2d88b1d25f332b75d73e5ba8cf8fbc2a10d - category: main - optional: false -- name: xz - version: 5.2.6 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 - hash: - md5: 2161070d867d1b1204ea749c8eec4ef0 - sha256: 03a6d28ded42af8a347345f82f3eebdd6807a08526d47899a42d62d319609162 - category: main - optional: false -- name: yaml - version: 0.2.5 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=9.4.0' - url: https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2 - hash: - md5: 4cb3ad778ec2d5a7acbdf254eb1c42ae - sha256: a4e34c710eeb26945bdbdaba82d3d74f60a78f54a874ec10d373811a5d217535 - category: main - optional: false -- name: zfp - version: 0.5.5 - manager: conda - platform: linux-64 - dependencies: - _openmp_mutex: '>=4.5' - 
libgcc-ng: '>=9.4.0' - libstdcxx-ng: '>=9.4.0' - url: https://conda.anaconda.org/conda-forge/linux-64/zfp-0.5.5-h9c3ff4c_8.tar.bz2 - hash: - md5: f12900b1e1e0527c0e9a4e922a5de2bf - sha256: 22f90931ae2d6f084107cd5a5cac5d065df7d23150356ffc56eba50260562174 - category: main - optional: false -- name: expat - version: 2.5.0 - manager: conda - platform: linux-64 - dependencies: - libexpat: 2.5.0 - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda - hash: - md5: 8b9b5aca60558d02ddaa09d599e55920 - sha256: 36dfeb4375059b3bba75ce9b38c29c69fd257342a79e6cf20e9f25c1523f785f - category: main - optional: false -- name: gcc_impl_linux-64 - version: 12.3.0 - manager: conda - platform: linux-64 - dependencies: - binutils_impl_linux-64: '>=2.39' - libgcc-devel_linux-64: 12.3.0 - libgcc-ng: '>=12.3.0' - libgomp: '>=12.3.0' - libsanitizer: 12.3.0 - libstdcxx-ng: '>=12.3.0' - sysroot_linux-64: '' - url: https://conda.anaconda.org/conda-forge/linux-64/gcc_impl_linux-64-12.3.0-he2b93b0_2.conda - hash: - md5: 2f4d8677dc7dd87f93e9abfb2ce86808 - sha256: 62a897343229e6dc4a3ace4f419a30e60a0a22ce7d0eac0b9bfb8f0308cf3de5 - category: main - optional: false -- name: hdf4 - version: 4.2.15 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libjpeg-turbo: '>=2.1.5.1,<3.0a0' - libstdcxx-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda - hash: - md5: c3e9338e15d90106f467377017352b97 - sha256: 8ad0e739f106e2937e36a2177d012165bc2173fac0f0b941c5796d85f854f9be - category: main - optional: false -- name: imath - version: 3.1.9 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/imath-3.1.9-hfc55251_0.conda - hash: - md5: 73f8b3aecf00f4a7eee44faed06164b3 - sha256: 81b28259a6020fea4ea97ff5997abadc38aae8b7f6f8afdaad17925d31a5f518 - category: main - optional: false -- name: libbrotlidec - version: 1.1.0 - manager: conda - platform: linux-64 - dependencies: - libbrotlicommon: 1.1.0 - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.conda - hash: - md5: f07002e225d7a60a694d42a7bf5ff53f - sha256: 86fc861246fbe5ad85c1b6b3882aaffc89590a48b42d794d3d5c8e6d99e5f926 - category: main - optional: false -- name: libbrotlienc - version: 1.1.0 - manager: conda - platform: linux-64 - dependencies: - libbrotlicommon: 1.1.0 - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda - hash: - md5: 5fc11c6020d421960607d821310fcd4d - sha256: f751b8b1c4754a2a8dfdc3b4040fa7818f35bbf6b10e905a47d3a194b746b071 - category: main - optional: false -- name: libcap - version: '2.69' - manager: conda - platform: linux-64 - dependencies: - attr: '>=2.5.1,<2.6.0a0' - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda - hash: - md5: 25cb5999faa414e5ccb2c1388f62d3d5 - sha256: 942f9564b4228609f017b6617425d29a74c43b8a030e12239fa4458e5cb6323c - category: main - optional: false -- name: libdrm - version: 2.4.114 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libpciaccess: '>=0.17,<0.18.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.114-h166bdaf_0.tar.bz2 - hash: - md5: efb58e80f5d0179a783c4e76c3df3b9c - sha256: 9316075084ad66f9f96d31836e83303a8199eec93c12d68661e41c44eed101e3 - category: 
main - optional: false -- name: libedit - version: 3.1.20191231 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=7.5.0' - ncurses: '>=6.2,<7.0.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2 - hash: - md5: 4d331e44109e3f0e19b4cb8f9b82f3e1 - sha256: a57d37c236d8f7c886e01656f4949d9dcca131d2a0728609c6f7fa338b65f1cf - category: main - optional: false -- name: libevent - version: 2.1.12 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - openssl: '>=3.1.1,<4.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda - hash: - md5: a1cfcc585f0c42bf8d5546bb1dfb668d - sha256: 2e14399d81fb348e9d231a82ca4d816bf855206923759b69ad006ba482764131 - category: main - optional: false -- name: libflac - version: 1.4.3 - manager: conda - platform: linux-64 - dependencies: - gettext: '>=0.21.1,<1.0a0' - libgcc-ng: '>=12' - libogg: '>=1.3.4,<1.4.0a0' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda - hash: - md5: ee48bf17cc83a00f59ca1494d5646869 - sha256: 65908b75fa7003167b8a8f0001e11e58ed5b1ef5e98b96ab2ba66d7c1b822c7d - category: main - optional: false -- name: libgfortran-ng - version: 13.2.0 - manager: conda - platform: linux-64 - dependencies: - libgfortran5: 13.2.0 - url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_2.conda - hash: - md5: e75a75a6eaf6f318dae2631158c46575 - sha256: 767d71999e5386210fe2acaf1b67073e7943c2af538efa85c101e3401e94ff62 - category: main - optional: false -- name: libgpg-error - version: '1.47' - manager: conda - platform: linux-64 - dependencies: - gettext: '>=0.21.1,<1.0a0' - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda - hash: - md5: c2097d0b46367996f09b4e8e4920384a - sha256: 0306b3c2d65863048983a50bd8b86f6f26e457ef55d1da745a5796af25093f5a - category: main - optional: false -- name: libidn2 - version: 2.3.4 - manager: conda - platform: linux-64 - dependencies: - gettext: '>=0.21.1,<1.0a0' - libgcc-ng: '>=12' - libunistring: '>=0,<1.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libidn2-2.3.4-h166bdaf_0.tar.bz2 - hash: - md5: 7440fbafd870b8bab68f83a064875d34 - sha256: 888848ae85be9df86f56407639c63bdce8e7651f0b2517be9bc0ac6e38b2d21d - category: main - optional: false -- name: libnghttp2 - version: 1.52.0 - manager: conda - platform: linux-64 - dependencies: - c-ares: '>=1.18.1,<2.0a0' - libev: '>=4.33,<4.34.0a0' - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - openssl: '>=3.0.8,<4.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda - hash: - md5: 613955a50485812985c059e7b269f42e - sha256: ecd6b08c2b5abe7d1586428c4dd257dcfa00ee53700d79cdc8bca098fdfbd79a - category: main - optional: false -- name: libpng - version: 1.6.39 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda - hash: - md5: e1c890aebdebbfbf87e2c917187b4416 - sha256: a32b36d34e4f2490b99bddbc77d01a674d304f667f0e62c89e02c961addef462 - category: main - optional: false -- name: libsqlite - version: 3.43.2 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.43.2-h2797004_0.conda - 
hash: - md5: 4b441a1ee22397d5a27dc1126b849edd - sha256: b30279b67fce2382a93c638625ff2b284324e2347e30bd0acab813d89289c18a - category: main - optional: false -- name: libssh2 - version: 1.11.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - openssl: '>=3.1.1,<4.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda - hash: - md5: 1f5a58e686b13bcfde88b93f547d23fe - sha256: 50e47fd9c4f7bf841a11647ae7486f65220cfc988ec422a4475fe8d5a823824d - category: main - optional: false -- name: libvorbis - version: 1.3.7 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=9.3.0' - libogg: '>=1.3.4,<1.4.0a0' - libstdcxx-ng: '>=9.3.0' - url: https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2 - hash: - md5: 309dec04b70a3cc0f1e84a4013683bc0 - sha256: 53080d72388a57b3c31ad5805c93a7328e46ff22fab7c44ad2a86d712740af33 - category: main - optional: false -- name: libxcb - version: '1.15' - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - pthread-stubs: '' - xorg-libxau: '' - xorg-libxdmcp: '' - url: https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda - hash: - md5: 33277193f5b92bad9fdd230eb700929c - sha256: a670902f0a3173a466c058d2ac22ca1dd0df0453d3a80e0212815c20a16b0485 - category: main - optional: false -- name: libxml2 - version: 2.11.5 - manager: conda - platform: linux-64 - dependencies: - icu: '>=73.2,<74.0a0' - libgcc-ng: '>=12' - libiconv: '>=1.17,<2.0a0' - libzlib: '>=1.2.13,<1.3.0a0' - xz: '>=5.2.6,<6.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h232c23b_1.conda - hash: - md5: f3858448893839820d4bcfb14ad3ecdf - sha256: 1b3cb6864de1a558ea5fb144c780121d52507837d15df0600491d8ed92cff90c - category: main - optional: false -- name: libzip - version: 1.10.1 - manager: conda - platform: linux-64 - dependencies: - bzip2: '>=1.0.8,<2.0a0' - libgcc-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - openssl: '>=3.1.2,<4.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda - hash: - md5: ac79812548e7e8cf61f7b0abdef01d3b - sha256: 84e93f189072dcfcbe77744f19c7e4171523fbecfaba7352e5a23bbe014574c7 - category: main - optional: false -- name: mpfr - version: 4.2.1 - manager: conda - platform: linux-64 - dependencies: - gmp: '>=6.2.1,<7.0a0' - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/mpfr-4.2.1-h9458935_0.conda - hash: - md5: 4c28f3210b30250037a4a627eeee9e0f - sha256: 008230a53ff15cf61966476b44f7ba2c779826825b9ca639a0a2b44d8f7aa6cb - category: main - optional: false -- name: mysql-common - version: 8.0.33 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - openssl: '>=3.1.3,<4.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_5.conda - hash: - md5: 1e8ef4090ca4f0d66404a7441e1dbf3c - sha256: 1b5114244f28e416d42e9ac0c61dfdabdfd7255b9fe752a07ca157a3e3893d2a - category: main - optional: false -- name: p11-kit - version: 0.24.1 - manager: conda - platform: linux-64 - dependencies: - libffi: '>=3.4.2,<3.5.0a0' - libgcc-ng: '>=12' - libtasn1: '>=4.18.0,<5.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/p11-kit-0.24.1-hc5aa10d_0.tar.bz2 - hash: - md5: 56ee94e34b71742bbdfa832c974e47a8 - sha256: aa8d3887b36557ad0c839e4876c0496e0d670afe843bf5bba4a87764b868196d - category: main - optional: false -- name: pcre2 - version: '10.40' - manager: conda - platform: 
linux-64 - dependencies: - bzip2: '>=1.0.8,<2.0a0' - libgcc-ng: '>=12' - libzlib: '>=1.2.12,<1.3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2 - hash: - md5: 69e2c796349cd9b273890bee0febfe1b - sha256: 7a29ec847556eed4faa1646010baae371ced69059a4ade43851367a076d6108a - category: main - optional: false -- name: readline - version: '8.2' - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - ncurses: '>=6.3,<7.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda - hash: - md5: 47d31b792659ce70f470b5c82fdfb7a4 - sha256: 5435cf39d039387fbdc977b0a762357ea909a7694d9528ab40f005e9208744d7 - category: main - optional: false -- name: tk - version: 8.6.13 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-h2797004_0.conda - hash: - md5: 513336054f884f95d9fd925748f41ef3 - sha256: 679e944eb93fde45d0963a22598fafacbb429bb9e7ee26009ba81c4e0c435055 - category: main - optional: false -- name: xorg-fixesproto - version: '5.0' - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=9.3.0' - xorg-xextproto: '' - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-fixesproto-5.0-h7f98852_1002.tar.bz2 - hash: - md5: 65ad6e1eb4aed2b0611855aff05e04f6 - sha256: 5d2af1b40f82128221bace9466565eca87c97726bb80bbfcd03871813f3e1876 - category: main - optional: false -- name: xorg-libsm - version: 1.2.4 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libuuid: '>=2.38.1,<3.0a0' - xorg-libice: '>=1.1.1,<2.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda - hash: - md5: 93ee23f12bc2e684548181256edd2cf6 - sha256: 089ad5f0453c604e18985480218a84b27009e9e6de9a0fa5f4a20b8778ede1f1 - category: main - optional: false -- name: zeromq - version: 4.3.5 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libsodium: '>=1.0.18,<1.0.19.0a0' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h59595ed_0.conda - hash: - md5: 8851084c192dbc56215ac4e3c9aa30fa - sha256: 53bf2a18224406e9806adb3b270a2c8a028aca0c89bd40114a85d6446f5c98d1 - category: main - optional: false -- name: zlib - version: 1.2.13 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libzlib: 1.2.13 - url: https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda - hash: - md5: 68c34ec6149623be41a1933ab996a209 - sha256: 9887a04d7e7cb14bd2b52fa01858f05a6d7f002c890f618d9fcd864adbfecb1b - category: main - optional: false -- name: zstd - version: 1.5.5 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.5-hfc55251_0.conda - hash: - md5: 04b88013080254850d6c01ed54810589 - sha256: 607cbeb1a533be98ba96cf5cdf0ddbb101c78019f1fda063261871dad6248609 - category: main - optional: false -- name: blosc - version: 1.21.5 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - lz4-c: '>=1.9.3,<1.10.0a0' - snappy: '>=1.1.10,<2.0a0' - zstd: '>=1.5.5,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.5-h0f2a231_0.conda - hash: - md5: 009521b7ed97cca25f8f997f9e745976 - sha256: e2b15b017775d1bda8edbb1bc48e545e45364edefa4d926732fc5488cc600731 - category: 
main - optional: false -- name: brotli-bin - version: 1.1.0 - manager: conda - platform: linux-64 - dependencies: - libbrotlidec: 1.1.0 - libbrotlienc: 1.1.0 - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda - hash: - md5: 39f910d205726805a958da408ca194ba - sha256: a641abfbaec54f454c8434061fffa7fdaa9c695e8a5a400ed96b4f07c0c00677 - category: main - optional: false -- name: freetype - version: 2.12.1 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libpng: '>=1.6.39,<1.7.0a0' - libzlib: '>=1.2.13,<1.3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda - hash: - md5: 9ae35c3d96db2c94ce0cef86efdfa2cb - sha256: b2e3c449ec9d907dd4656cb0dc93e140f447175b125a3824b31368b06c666bb6 - category: main - optional: false -- name: gcc_linux-64 - version: 12.3.0 - manager: conda - platform: linux-64 - dependencies: - binutils_linux-64: '2.40' - gcc_impl_linux-64: 12.3.0.* - sysroot_linux-64: '' - url: https://conda.anaconda.org/conda-forge/linux-64/gcc_linux-64-12.3.0-h76fc315_2.conda - hash: - md5: 11517e7b5c910c5b5d6985c0c7eb7f50 - sha256: 86f6db7399ec0362e4c4025939debbfebc8ad9ccef75e3c0e4069f85b149f24d - category: main - optional: false -- name: gl2ps - version: 1.4.2 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=9.3.0' - libpng: '>=1.6.37,<1.7.0a0' - zlib: '>=1.2.11,<1.3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/gl2ps-1.4.2-h0708190_0.tar.bz2 - hash: - md5: 438718bf8921ac70956d919d0e2cc487 - sha256: feaf757731cfb8231d8a6c5b3446bbc428aa1cca126f09628ccafaa98a80f022 - category: main - optional: false -- name: gnutls - version: 3.7.8 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libidn2: '>=2,<3.0a0' - libstdcxx-ng: '>=12' - libtasn1: '>=4.19.0,<5.0a0' - nettle: '>=3.8.1,<3.9.0a0' - p11-kit: '>=0.24.1,<0.25.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/gnutls-3.7.8-hf3e180e_0.tar.bz2 - hash: - md5: cbe8e27140d67c3f30e01cfb642a6e7c - sha256: 4a47e4558395b98fff4c1c44ad358dade62b350a03b5a784d4bc589d6eb7ac9e - category: main - optional: false -- name: gxx_impl_linux-64 - version: 12.3.0 - manager: conda - platform: linux-64 - dependencies: - gcc_impl_linux-64: 12.3.0 - libstdcxx-devel_linux-64: 12.3.0 - sysroot_linux-64: '' - url: https://conda.anaconda.org/conda-forge/linux-64/gxx_impl_linux-64-12.3.0-he2b93b0_2.conda - hash: - md5: f89b9916afc36fc5562fbfc11330a8a2 - sha256: 1ca91c1a3892b61da7efe150f9a1830e18aac82f563b27bf707520cb3297cc7a - category: main - optional: false -- name: krb5 - version: 1.21.2 - manager: conda - platform: linux-64 - dependencies: - keyutils: '>=1.6.1,<2.0a0' - libedit: '>=3.1.20191231,<4.0a0' - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - openssl: '>=3.1.2,<4.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda - hash: - md5: cd95826dbd331ed1be26bdf401432844 - sha256: 259bfaae731989b252b7d2228c1330ef91b641c9d68ff87dae02cbae682cb3e4 - category: main - optional: false -- name: libboost - version: 1.82.0 - manager: conda - platform: linux-64 - dependencies: - bzip2: '>=1.0.8,<2.0a0' - icu: '>=73.2,<74.0a0' - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - xz: '>=5.2.6,<6.0a0' - zstd: '>=1.5.5,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libboost-1.82.0-h6fcfa73_6.conda - hash: - md5: 05c40141d4184953616797d5c3d7947f - sha256: c820f1ca7a2844fc5589bb101cc33188e06205ccb022051e13a6398def22e8bf - 
category: main - optional: false -- name: libgcrypt - version: 1.10.1 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=10.3.0' - libgpg-error: '>=1.44,<2.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2 - hash: - md5: f967fc95089cd247ceed56eda31de3a9 - sha256: 8fd7e6db1021cd9298d9896233454de204116840eb66a06fcb712e1015ff132a - category: main - optional: false -- name: libglib - version: 2.78.0 - manager: conda - platform: linux-64 - dependencies: - gettext: '>=0.21.1,<1.0a0' - libffi: '>=3.4,<4.0a0' - libgcc-ng: '>=12' - libiconv: '>=1.17,<2.0a0' - libstdcxx-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - pcre2: '>=10.40,<10.41.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.0-hebfc3b9_0.conda - hash: - md5: e618003da3547216310088478e475945 - sha256: 96ec4dc5e38f434aa5862cb46d74923cce1445de3cd0b9d61e3e63102b163af6 - category: main - optional: false -- name: libhwloc - version: 2.9.3 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - libxml2: '>=2.11.5,<2.12.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.9.3-default_h554bfaf_1009.conda - hash: - md5: f36ddc11ca46958197a45effdd286e45 - sha256: 6950fee24766d03406e0f6f965262a5d98829c71eed8d1004f313892423b559b - category: main - optional: false -- name: libllvm15 - version: 15.0.7 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - libxml2: '>=2.11.4,<2.12.0a0' - libzlib: '>=1.2.13,<1.3.0a0' - zstd: '>=1.5.2,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda - hash: - md5: 9efe82d44b76a7529a1d702e5a37752e - sha256: bb94e7535a309c2a8d58585cb82bac954ed59f473eef2cac6ea677d6f576a3b6 - category: main - optional: false -- name: libopenblas - version: 0.3.24 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libgfortran-ng: '' - libgfortran5: '>=12.3.0' - url: https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.24-pthreads_h413a1c8_0.conda - hash: - md5: 6e4ef6ca28655124dcde9bd500e44c32 - sha256: c8e080ae4d57506238023e98869928ae93564e6407ef5b0c4d3a337e8c2b7662 - category: main - optional: false -- name: libsndfile - version: 1.2.2 - manager: conda - platform: linux-64 - dependencies: - lame: '>=3.100,<3.101.0a0' - libflac: '>=1.4.3,<1.5.0a0' - libgcc-ng: '>=12' - libogg: '>=1.3.4,<1.4.0a0' - libopus: '>=1.3.1,<2.0a0' - libstdcxx-ng: '>=12' - libvorbis: '>=1.3.7,<1.4.0a0' - mpg123: '>=1.32.1,<1.33.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hc60ed4a_1.conda - hash: - md5: ef1910918dd895516a769ed36b5b3a4e - sha256: f709cbede3d4f3aee4e2f8d60bd9e256057f410bd60b8964cb8cf82ec1457573 - category: main - optional: false -- name: libtheora - version: 1.1.1 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=9.3.0' - libogg: '>=1.3.4,<1.4.0a0' - libpng: '>=1.6.37,<1.7.0a0' - libvorbis: '>=1.3.7,<1.4.0a0' - zlib: '>=1.2.11,<1.3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libtheora-1.1.1-h7f98852_1005.tar.bz2 - hash: - md5: 1a7c35f56343b7e9e8db20b296c7566c - sha256: 048ce34ba5b143f099cca3d388dfc41acf24d634dd00c5b1c463fb81bf804070 - category: main - optional: false -- name: libtiff - version: 4.6.0 - manager: conda - platform: linux-64 - dependencies: - lerc: '>=4.0.0,<5.0a0' - libdeflate: '>=1.19,<1.20.0a0' - libgcc-ng: '>=12' - libjpeg-turbo: '>=2.1.5.1,<3.0a0' - libstdcxx-ng: '>=12' - libwebp-base: 
'>=1.3.2,<2.0a0' - libzlib: '>=1.2.13,<1.3.0a0' - xz: '>=5.2.6,<6.0a0' - zstd: '>=1.5.5,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-h29866fb_1.conda - hash: - md5: 4e9afd30f4ccb2f98645e51005f82236 - sha256: 16f70e3170b9acb5b5a9e7fe60fd9b1104c946e165a48882ebf38ecb7978e980 - category: main - optional: false -- name: mysql-libs - version: 8.0.33 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - mysql-common: 8.0.33 - openssl: '>=3.1.3,<4.0a0' - zstd: '>=1.5.5,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_5.conda - hash: - md5: b72f016c910ff9295b1377d3e17da3f2 - sha256: 90a5c105e35990cac53f45366c256d88f4c8f66a360afd37dcae1357e370ade6 - category: main - optional: false -- name: nss - version: '3.94' - manager: conda - platform: linux-64 - dependencies: - __glibc: '>=2.17,<3.0.a0' - libgcc-ng: '>=12' - libsqlite: '>=3.43.0,<4.0a0' - libstdcxx-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - nspr: '>=4.35,<5.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/nss-3.94-h1d7d5a4_0.conda - hash: - md5: 7caef74bbfa730e014b20f0852068509 - sha256: c9b7910fc554c6550905b9150f4c8230e973ca63f41b42f2c18a49e8aa458e78 - category: main - optional: false -- name: openexr - version: 3.2.1 - manager: conda - platform: linux-64 - dependencies: - imath: '>=3.1.9,<3.1.10.0a0' - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/openexr-3.2.1-h3f0fd8d_0.conda - hash: - md5: 76df8e97c9d28062f4ea8a356bda1232 - sha256: fedd59f40d5d620844f1691a7e24f8ea23c453f98717b8f8b61e71fdd8b7c354 - category: main - optional: false -- name: openmpi - version: 4.1.6 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libgfortran-ng: '' - libgfortran5: '>=12.3.0' - libstdcxx-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - mpi: '1.0' - zlib: '' - url: https://conda.anaconda.org/conda-forge/linux-64/openmpi-4.1.6-hc5af2df_101.conda - hash: - md5: f9a2ad0088ee38f396350515fa37d243 - sha256: f0769dd891e1735be4606ec8643951e5cbca199f774e58c7d933f70a70134ce4 - category: main - optional: false -- name: python - version: 3.10.12 - manager: conda - platform: linux-64 - dependencies: - bzip2: '>=1.0.8,<2.0a0' - ld_impl_linux-64: '>=2.36.1' - libffi: '>=3.4,<4.0a0' - libgcc-ng: '>=12' - libnsl: '>=2.0.0,<2.1.0a0' - libsqlite: '>=3.42.0,<4.0a0' - libuuid: '>=2.38.1,<3.0a0' - libzlib: '>=1.2.13,<1.3.0a0' - ncurses: '>=6.4,<7.0a0' - openssl: '>=3.1.1,<4.0a0' - readline: '>=8.2,<9.0a0' - tk: '>=8.6.12,<8.7.0a0' - tzdata: '' - xz: '>=5.2.6,<6.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/python-3.10.12-hd12c33a_0_cpython.conda - hash: - md5: eb6f1df105f37daedd6dca78523baa75 - sha256: 05e2a7ce916d259f11979634f770f31027d0a5d18463b094e64a30500f900699 - category: main - optional: false -- name: scotch - version: 6.0.9 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=10.3.0' - libzlib: '>=1.2.11,<1.3.0a0' - zlib: '>=1.2.11,<1.3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/scotch-6.0.9-hb2e6521_2.tar.bz2 - hash: - md5: 20eb1f0c247d10da95b1da761e7f4c10 - sha256: 88be3ee9b49716657d8429fbb9b6ce4eb65efd79b7e660636775a858cb077921 - category: main - optional: false -- name: sqlite - version: 3.43.2 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libsqlite: 3.43.2 - libzlib: '>=1.2.13,<1.3.0a0' - ncurses: '>=6.4,<7.0a0' - readline: 
'>=8.2,<9.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.43.2-h2c6b66d_0.conda - hash: - md5: c37b95bcd6c6833dacfd5df0ae2f4303 - sha256: f49389e9cce5bdc451d1c5b56972cf5f75b1ba00350d35ab099848e65b32e94f - category: main - optional: false -- name: xcb-util - version: 0.4.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libxcb: '>=1.15,<1.16.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda - hash: - md5: 9bfac7ccd94d54fd21a0501296d60424 - sha256: 0c91d87f0efdaadd4e56a5f024f8aab20ec30f90aa2ce9e4ebea05fbc20f71ad - category: main - optional: false -- name: xcb-util-keysyms - version: 0.4.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libxcb: '>=1.15,<1.16.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda - hash: - md5: 632413adcd8bc16b515cab87a2932913 - sha256: 8451d92f25d6054a941b962179180728c48c62aab5bf20ac10fef713d5da6a9a - category: main - optional: false -- name: xcb-util-renderutil - version: 0.3.9 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libxcb: '>=1.15,<1.16.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-hd590300_1.conda - hash: - md5: e995b155d938b6779da6ace6c6b13816 - sha256: 6987588e6fff5892056021c2ea52f7a0deefb2c7348e70d24750e2d60dabf009 - category: main - optional: false -- name: xcb-util-wm - version: 0.4.1 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libxcb: '>=1.15,<1.16.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h8ee46fc_1.conda - hash: - md5: 90108a432fb5c6150ccfee3f03388656 - sha256: 08ba7147c7579249b6efd33397dc1a8c2404278053165aaecd39280fee705724 - category: main - optional: false -- name: xorg-libx11 - version: 1.8.7 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libxcb: '>=1.15,<1.16.0a0' - xorg-kbproto: '' - xorg-xextproto: '>=7.3.0,<8.0a0' - xorg-xproto: '' - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.7-h8ee46fc_0.conda - hash: - md5: 49e482d882669206653b095f5206c05b - sha256: 7a02a7beac472ae2759498550b5fc5261bf5be7a9a2b4648a3f67818a7bfefcf - category: main - optional: false -- name: alabaster - version: 0.7.13 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda - hash: - md5: 06006184e203b61d3525f90de394471e - sha256: b2d160a050996950434c6e87a174fc01c4a937cbeffbdd20d1b46126b4478a95 - category: main - optional: false -- name: attrs - version: 23.1.0 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/attrs-23.1.0-pyh71513ae_1.conda - hash: - md5: 3edfead7cedd1ab4400a6c588f3e75f8 - sha256: 063639cd568f5c7a557b0fb1cc27f098598c0d8ff869088bfeb82934674f8821 - category: main - optional: false -- name: brotli - version: 1.1.0 - manager: conda - platform: linux-64 - dependencies: - brotli-bin: 1.1.0 - libbrotlidec: 1.1.0 - libbrotlienc: 1.1.0 - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda - hash: - md5: f27a24d46e3ea7b70a1f98e50c62508f - sha256: f2d918d351edd06c55a6c2d84b488fe392f85ea018ff227daac07db22b408f6b - category: main - optional: false -- name: brotli-python - version: 1.1.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - 
python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - url: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py310hc6cd4ac_1.conda - hash: - md5: 1f95722c94f00b69af69a066c7433714 - sha256: e22268d81905338570786921b3def88e55f9ed6d0ccdd17d9fbae31a02fbef69 - category: main - optional: false -- name: cached_property - version: 1.5.2 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 - hash: - md5: 576d629e47797577ab0f1b351297ef4a - sha256: 6dbf7a5070cc43d90a1e4c2ec0c541c69d8e30a0e25f50ce9f6e4a432e42c5d7 - category: main - optional: false -- name: certifi - version: 2023.7.22 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/certifi-2023.7.22-pyhd8ed1ab_0.conda - hash: - md5: 7f3dbc9179b4dde7da98dfb151d0ad22 - sha256: db66e31866ff4250c190788769e3a8a1709237c3e9c38d7143aae95ab75fcb31 - category: main - optional: false -- name: charset-normalizer - version: 3.3.1 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.1-pyhd8ed1ab_0.conda - hash: - md5: 985378f74689fccce52f158027bd9acd - sha256: a31739c49c4b1c8e0cbdec965ba152683d36ce6e23bdaefcfee99937524dabd1 - category: main - optional: false -- name: click - version: 8.1.7 - manager: conda - platform: linux-64 - dependencies: - __unix: '' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda - hash: - md5: f3ad426304898027fc619827ff428eca - sha256: f0016cbab6ac4138a429e28dbcb904a90305b34b3fe41a9b89d697c90401caec - category: main - optional: false -- name: colorama - version: 0.4.6 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 - hash: - md5: 3faab06a954c2a04039983f2c4a50d99 - sha256: 2c1b2e9755ce3102bca8d69e8f26e4f087ece73f50418186aee7c74bef8e1698 - category: main - optional: false -- name: cycler - version: 0.12.1 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda - hash: - md5: 5cd86562580f274031ede6aa6aa24441 - sha256: f221233f21b1d06971792d491445fd548224641af9443739b4b7b6d5d72954a8 - category: main - optional: false -- name: dbus - version: 1.13.6 - manager: conda - platform: linux-64 - dependencies: - expat: '>=2.4.2,<3.0a0' - libgcc-ng: '>=9.4.0' - libglib: '>=2.70.2,<3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2 - hash: - md5: ecfff944ba3960ecb334b9a2663d708d - sha256: 8f5f995699a2d9dbdd62c61385bfeeb57c82a681a7c8c5313c395aa0ccab68a5 - category: main - optional: false -- name: docutils - version: 0.18.1 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - url: https://conda.anaconda.org/conda-forge/linux-64/docutils-0.18.1-py310hff52083_1.tar.bz2 - hash: - md5: 6405f87c427cdbc25b6b6a21bd6bfc2a - sha256: 2071bf7c56305d234161bef00c0c2ba7ae345484105d2ccc448c7c734634f346 - category: main - optional: false -- name: exceptiongroup - version: 1.1.3 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.3-pyhd8ed1ab_0.conda - hash: - md5: e6518222753f519e911e83136d2158d9 - sha256: 
c28f715e049fe0f09785660bcbffa175ffb438720e5bc5a60d56d4b08364b315 - category: main - optional: false -- name: fftw - version: 3.3.10 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libgfortran-ng: '' - libgfortran5: '>=11.4.0' - libstdcxx-ng: '>=12' - openmpi: '>=4.1.5,<5.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/fftw-3.3.10-mpi_openmpi_h4a81ba8_8.conda - hash: - md5: 278a83d849ad75d6844621b225939b11 - sha256: 1b23330f61b9bfec33bc4513842586f898e70dee46f60ccd1e3d5b0874e96f6f - category: main - optional: false -- name: fontconfig - version: 2.14.2 - manager: conda - platform: linux-64 - dependencies: - expat: '>=2.5.0,<3.0a0' - freetype: '>=2.12.1,<3.0a0' - libgcc-ng: '>=12' - libuuid: '>=2.32.1,<3.0a0' - libzlib: '>=1.2.13,<1.3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda - hash: - md5: 0f69b688f52ff6da70bccb7ff7001d1d - sha256: 155d534c9037347ea7439a2c6da7c24ffec8e5dd278889b4c57274a1d91e0a83 - category: main - optional: false -- name: frozenlist - version: 1.4.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - url: https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.4.0-py310h2372a71_1.conda - hash: - md5: c7b2865e86782925a872c8598b760c08 - sha256: cd1e59ceac047d9f692bb7cc2a6a6e2356a7d3db660b076b4afb19d35db2fd02 - category: main - optional: false -- name: glib-tools - version: 2.78.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libglib: 2.78.0 - libstdcxx-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.0-hfc55251_0.conda - hash: - md5: e10134de3558dd95abda6987b5548f4f - sha256: 991803ca90e6ba54568ff1bcb8a02f69a9beb8a09988d257fc21e1bbb3557d8c - category: main - optional: false -- name: gxx_linux-64 - version: 12.3.0 - manager: conda - platform: linux-64 - dependencies: - binutils_linux-64: '2.40' - gcc_linux-64: 12.3.0 - gxx_impl_linux-64: 12.3.0.* - sysroot_linux-64: '' - url: https://conda.anaconda.org/conda-forge/linux-64/gxx_linux-64-12.3.0-h8a814eb_2.conda - hash: - md5: f517b1525e9783849bd56a5dc45a9960 - sha256: 9878771cf1316230150a795d213a2f1dd7dead07dc0bccafae20533d631d5e69 - category: main - optional: false -- name: idna - version: '3.4' - manager: conda - platform: linux-64 - dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2 - hash: - md5: 34272b248891bddccc64479f9a7fffed - sha256: 9887c35c374ec1847f167292d3fde023cb4c994a4ceeec283072b95440131f09 - category: main - optional: false -- name: imagesize - version: 1.4.1 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.4' - url: https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2 - hash: - md5: 7de5386c8fea29e76b303f37dde4c352 - sha256: c2bfd7043e0c4c12d8b5593de666c1e81d67b83c474a0a79282cc5c4ef845460 - category: main - optional: false -- name: iniconfig - version: 2.0.0 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda - hash: - md5: f800d2da156d08e289b14e87e43c1ae5 - sha256: 38740c939b668b36a50ef455b077e8015b8c9cf89860d421b3fff86048f49666 - category: main - optional: false -- name: kahip - version: '3.15' - manager: conda - platform: linux-64 - dependencies: - _openmp_mutex: '>=4.5' - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - metis: 
'>=5.1.0,<5.1.1.0a0' - openmpi: '>=4.1.5,<5.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/kahip-3.15-h8d85730_2.conda - hash: - md5: e619a53db9fb5a295df08c3fde41b18d - sha256: cb3bfd03b616ff1f32b30bc44a013e5686ea0e8d0201107a84e79151968f93c4 - category: main - optional: false -- name: kiwisolver - version: 1.4.5 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - url: https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py310hd41b1e2_1.conda - hash: - md5: b8d67603d43b23ce7e988a5d81a7ab79 - sha256: bb51906639bced3de1d4d7740ac284cdaa89e2f22e0b1ec796378b090b0648ba - category: main - optional: false -- name: lcms2 - version: '2.15' - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libjpeg-turbo: '>=2.1.5.1,<3.0a0' - libtiff: '>=4.6.0,<4.7.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-h7f713cb_2.conda - hash: - md5: 9ab79924a3760f85a799f21bc99bd655 - sha256: 9125833b3019bf29c4a20295665e7bc912de581086a53693f10709fae409a3b2 - category: main - optional: false -- name: libblas - version: 3.9.0 - manager: conda - platform: linux-64 - dependencies: - libopenblas: '>=0.3.24,<1.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-19_linux64_openblas.conda - hash: - md5: 420f4e9be59d0dc9133a0f43f7bab3f3 - sha256: b1311b9414559c5760b08a32e0382ca27fa302c967968aa6f78e042519f728ce - category: main - optional: false -- name: libboost-devel - version: 1.82.0 - manager: conda - platform: linux-64 - dependencies: - libboost: 1.82.0 - libboost-headers: 1.82.0 - url: https://conda.anaconda.org/conda-forge/linux-64/libboost-devel-1.82.0-h00ab1b0_6.conda - hash: - md5: 66fcac2ce711bea87d3aefd64cd03e9e - sha256: 3505d971488558513f1c0a673cb5d9a5f24c19753a06d68d29a355092804b9a2 - category: main - optional: false -- name: libclang13 - version: 15.0.7 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libllvm15: '>=15.0.7,<15.1.0a0' - libstdcxx-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda - hash: - md5: 1720df000b48e31842500323cb7be18c - sha256: df1221a9a05b9bb3bd9b43c08a7e2fe57a0e15a0010ef26065f7ed7666083f45 - category: main - optional: false -- name: libcups - version: 2.3.3 - manager: conda - platform: linux-64 - dependencies: - krb5: '>=1.21.1,<1.22.0a0' - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda - hash: - md5: d4529f4dff3057982a7617c7ac58fde3 - sha256: bc67b9b21078c99c6bd8595fe7e1ed6da1f721007726e717f0449de7032798c4 - category: main - optional: false -- name: libcurl - version: 8.4.0 - manager: conda - platform: linux-64 - dependencies: - krb5: '>=1.21.2,<1.22.0a0' - libgcc-ng: '>=12' - libnghttp2: '>=1.52.0,<2.0a0' - libssh2: '>=1.11.0,<2.0a0' - libzlib: '>=1.2.13,<1.3.0a0' - openssl: '>=3.1.3,<4.0a0' - zstd: '>=1.5.5,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.4.0-hca28451_0.conda - hash: - md5: 1158ac1d2613b28685644931f11ee807 - sha256: 25f4b6a8827d7b17a66e0bd9b5d194bf9a9e4a46fb14e2ef472fdad4b39426a6 - category: main - optional: false -- name: libpq - version: '15.4' - manager: conda - platform: linux-64 - dependencies: - krb5: '>=1.21.2,<1.22.0a0' - libgcc-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - openssl: '>=3.1.3,<4.0a0' - url: 
https://conda.anaconda.org/conda-forge/linux-64/libpq-15.4-hfc447b1_2.conda - hash: - md5: 4a180ab68881a86be49858c9baf4581d - sha256: f537ad28c083585e7c40e8a05f6febad8b9e649a48a1f2f497add3fc0947800b - category: main - optional: false -- name: libsystemd0 - version: '254' - manager: conda - platform: linux-64 - dependencies: - __glibc: '>=2.17,<3.0.a0' - libcap: '>=2.69,<2.70.0a0' - libgcc-ng: '>=12' - libgcrypt: '>=1.10.1,<2.0a0' - lz4-c: '>=1.9.3,<1.10.0a0' - xz: '>=5.2.6,<6.0a0' - zstd: '>=1.5.2,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda - hash: - md5: df4b1cd0c91b4234fb02b5701a4cdddc - sha256: e4732b9bc6acbdd3308cd0abd0860c9ea44e37127cd78acb797c996c20e4f42f - category: main - optional: false -- name: loguru - version: 0.7.2 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - url: https://conda.anaconda.org/conda-forge/linux-64/loguru-0.7.2-py310hff52083_1.conda - hash: - md5: 157e6221a079a60c7f6f6fcb87c722aa - sha256: 35319fe904289949e78af080ac05907bb545ecad64bd2eaea95efb8526069ee5 - category: main - optional: false -- name: markupsafe - version: 2.1.3 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - url: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py310h2372a71_1.conda - hash: - md5: b74e07a054c479e45a83a83fc5be713c - sha256: ac46cc2f6d4bbeedcd2f508e43f43143a9286ced55730d8d97a3c91ceceb0d56 - category: main - optional: false -- name: mdurl - version: 0.1.0 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.0-pyhd8ed1ab_0.tar.bz2 - hash: - md5: f8dab71fdc13b1bf29a01248b156d268 - sha256: c678b9194e025b1fb665bec30ee20aab93399203583875b1dcc0a3b52a8f5523 - category: main - optional: false -- name: mpi4py - version: 3.1.4 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - openmpi: '>=4.1.5,<5.0a0' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - url: https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.4-py310h2a790f2_1.conda - hash: - md5: 406ba065000549070682abfae8011248 - sha256: 516cbab850c2a0c894cf9f267e908d5771eb3636b2262d6b7353e24ecb344fa5 - category: main - optional: false -- name: multidict - version: 6.0.4 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - url: https://conda.anaconda.org/conda-forge/linux-64/multidict-6.0.4-py310h2372a71_1.conda - hash: - md5: 7ca797f0a0c390ede770f415f5d5e039 - sha256: d8180dcee801bcde6408d924bab0010fc956ae7a14681694af21f9d4382d8ee8 - category: main - optional: false -- name: munkres - version: 1.1.4 - manager: conda - platform: linux-64 - dependencies: - python: '' - url: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 - hash: - md5: 2ba8498c1018c1e9c61eb99b973dfe19 - sha256: f86fb22b58e93d04b6f25e0d811b56797689d598788b59dcb47f59045b568306 - category: main - optional: false -- name: mypy_extensions - version: 1.0.0 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.5' - url: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda - hash: - md5: 4eccaeba205f0aed9ac3a9ea58568ca3 - sha256: f240217476e148e825420c6bc3a0c0efb08c0718b7042fae960400c02af858a3 - category: main - optional: false -- name: openjpeg - version: 2.5.0 - manager: conda - platform: linux-64 - dependencies: - 
libgcc-ng: '>=12' - libpng: '>=1.6.39,<1.7.0a0' - libstdcxx-ng: '>=12' - libtiff: '>=4.6.0,<4.7.0a0' - libzlib: '>=1.2.13,<1.3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h488ebb8_3.conda - hash: - md5: 128c25b7fe6a25286a48f3a6a9b5b6f3 - sha256: 9fe91b67289267de68fda485975bb48f0605ac503414dc663b50d8b5f29bc82a - category: main - optional: false -- name: packaging - version: '23.2' - manager: conda - platform: linux-64 - dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/packaging-23.2-pyhd8ed1ab_0.conda - hash: - md5: 79002079284aa895f883c6b7f3f88fd6 - sha256: 69b3ace6cca2dab9047b2c24926077d81d236bef45329d264b394001e3c3e52f - category: main - optional: false -- name: parmetis - version: 4.0.3 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=9.3.0' - libstdcxx-ng: '>=9.3.0' - openmpi: '>=4.1,<4.2.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/parmetis-4.0.3-he9a3056_1005.tar.bz2 - hash: - md5: 160999f9228e8aac87dc170f0810bc74 - sha256: 20a46cebbec3c50cd0e33372112f962b69bba1082436fc095c90f65aadd43ffd - category: main - optional: false -- name: pathspec - version: 0.11.2 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/pathspec-0.11.2-pyhd8ed1ab_0.conda - hash: - md5: e41debb259e68490e3ab81e46b639ab6 - sha256: 7bcfa6d86359d45572ba9ccaeaedc04b0452e2654fe44b6fe378d0d37b8745e1 - category: main - optional: false -- name: pkgutil-resolve-name - version: 1.3.10 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda - hash: - md5: 405678b942f2481cecdb3e010f4925d9 - sha256: fecf95377134b0e8944762d92ecf7b0149c07d8186fb5db583125a2705c7ea0a - category: main - optional: false -- name: pluggy - version: 1.3.0 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda - hash: - md5: 2390bd10bed1f3fdc7a537fb5a447d8d - sha256: 7bf2ad9d747e71f1e93d0863c2c8061dd0f2fe1e582f28d292abfb40264a2eb5 - category: main - optional: false -- name: ply - version: '3.11' - manager: conda - platform: linux-64 - dependencies: - python: '' - url: https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2 - hash: - md5: 7205635cd71531943440fbfe3b6b5727 - sha256: 2cd6fae8f9cbc806b7f828f006ae4a83c23fac917cacfd73c37ce322d4324e53 - category: main - optional: false -- name: ptscotch - version: 6.0.9 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=10.3.0' - libzlib: '>=1.2.11,<1.3.0a0' - openmpi: '>=4.1.2,<5.0a0' - scotch: '>=6.0.9,<6.0.10.0a0' - zlib: '>=1.2.11,<1.3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/ptscotch-6.0.9-h0a9c416_2.tar.bz2 - hash: - md5: 4e5d899d1d4704c80b3051eb01490bb2 - sha256: c45c98ea3f3ee1648a801e13115c39005e2f9c378796edbf56c8cf2588ef2371 - category: main - optional: false -- name: pycparser - version: '2.21' - manager: conda - platform: linux-64 - dependencies: - python: 2.7.*|>=3.4 - url: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2 - hash: - md5: 076becd9e05608f8dc72757d5f3a91ff - sha256: 74c63fd03f1f1ea2b54e8bc529fd1a600aaafb24027b738d0db87909ee3a33dc - category: main - optional: false -- name: pygments - version: 2.16.1 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.7' - url: 
https://conda.anaconda.org/conda-forge/noarch/pygments-2.16.1-pyhd8ed1ab_0.conda - hash: - md5: 40e5cb18165466773619e5c963f00a7b - sha256: 3f0f0fadc6084960ec8cc00a32a03529c562ffea3b527eb73b1653183daad389 - category: main - optional: false -- name: pyparsing - version: 3.1.1 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.1-pyhd8ed1ab_0.conda - hash: - md5: 176f7d56f0cfe9008bdf1bccd7de02fb - sha256: 4a1332d634b6c2501a973655d68f08c9c42c0bd509c349239127b10572b8354b - category: main - optional: false -- name: pysocks - version: 1.7.1 - manager: conda - platform: linux-64 - dependencies: - __unix: '' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2 - hash: - md5: 2a7de29fb590ca14b5243c4c812c8025 - sha256: a42f826e958a8d22e65b3394f437af7332610e43ee313393d1cf143f0a2d274b - category: main - optional: false -- name: pytz - version: 2023.3.post1 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3.post1-pyhd8ed1ab_0.conda - hash: - md5: c93346b446cd08c169d843ae5fc0da97 - sha256: 6b680e63d69aaf087cd43ca765a23838723ef59b0a328799e6363eb13f52c49e - category: main - optional: false -- name: pyyaml - version: 6.0.1 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - yaml: '>=0.2.5,<0.3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py310h2372a71_1.conda - hash: - md5: bb010e368de4940771368bc3dc4c63e7 - sha256: aa78ccddb0a75fa722f0f0eb3537c73ee1219c9dd46cea99d6b9eebfdd780f3d - category: main - optional: false -- name: rpds-py - version: 0.10.6 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - url: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.10.6-py310hcb5633a_0.conda - hash: - md5: 43c12d8f7891a87378eb5339c49ef051 - sha256: a23d2f15c48cc689d26dc3f50ee91be9ed2925c5fbae7bc5d93e49db7517b847 - category: main - optional: false -- name: setuptools - version: 68.2.2 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/setuptools-68.2.2-pyhd8ed1ab_0.conda - hash: - md5: fc2166155db840c634a1291a5c35a709 - sha256: 851901b1f8f2049edb36a675f0c3f9a98e1495ef4eb214761b048c6f696a06f7 - category: main - optional: false -- name: six - version: 1.16.0 - manager: conda - platform: linux-64 - dependencies: - python: '' - url: https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2 - hash: - md5: e5f25f8dbc060e9a8d912e432202afc2 - sha256: a85c38227b446f42c5b90d9b642f2c0567880c15d72492d8da074a59c8f91dd6 - category: main - optional: false -- name: snowballstemmer - version: 2.2.0 - manager: conda - platform: linux-64 - dependencies: - python: '>=2' - url: https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2 - hash: - md5: 4d22a9315e78c6827f806065957d566e - sha256: a0fd916633252d99efb6223b1050202841fa8d2d53dacca564b0ed77249d3228 - category: main - optional: false -- name: sphinxcontrib-jsmath - version: 1.0.1 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.5' - url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_0.conda - hash: - md5: da1d979339e2714c30a8e806a33ec087 - sha256: 
d4337d83b8edba688547766fc80f1ac86d6ec86ceeeda93f376acc04079c5ce2 - category: main - optional: false -- name: tbb - version: 2021.10.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libhwloc: '>=2.9.3,<2.9.4.0a0' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/tbb-2021.10.0-h00ab1b0_2.conda - hash: - md5: eb0d5c122f42714f86a7058d1ce7b2e6 - sha256: 79a6c48fa1df661af7ab3e4f5fa444dd305d87921be017413a8b97fd6d642328 - category: main - optional: false -- name: toml - version: 0.10.2 - manager: conda - platform: linux-64 - dependencies: - python: '>=2.7' - url: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2 - hash: - md5: f832c45a477c78bebd107098db465095 - sha256: f0f3d697349d6580e4c2f35ba9ce05c65dc34f9f049e85e45da03800b46139c1 - category: main - optional: false -- name: tomli - version: 2.0.1 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 - hash: - md5: 5844808ffab9ebdb694585b50ba02a96 - sha256: 4cd48aba7cd026d17e86886af48d0d2ebc67ed36f87f6534f4b67138f5a5a58f - category: main - optional: false -- name: tornado - version: 6.3.3 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - url: https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py310h2372a71_1.conda - hash: - md5: b23e0147fa5f7a9380e06334c7266ad5 - sha256: 209b6788b81739d3cdc2f04ad3f6f323efd85b1a30f2edce98ab76d98079fac8 - category: main - optional: false -- name: typing_extensions - version: 4.8.0 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.8.0-pyha770c72_0.conda - hash: - md5: 5b1be40a26d10a06f6d4f1f9e19fa0c7 - sha256: 38d16b5c53ec1af845d37d22e7bb0e6c934c7f19499123507c5a470f6f8b7dde - category: main - optional: false -- name: unicodedata2 - version: 15.1.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - url: https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py310h2372a71_0.conda - hash: - md5: 72637c58d36d9475fda24700c9796f19 - sha256: 5ab2f2d4542ba0cc27d222c08ae61706babe7173b0c6dfa748aa37ff2fa9d824 - category: main - optional: false -- name: wheel - version: 0.41.2 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.2-pyhd8ed1ab_0.conda - hash: - md5: 1ccd092478b3e0ee10d7a891adbf8a4f - sha256: 21bcec5373b04d739ab65252b5532b04a08d229865ebb24b5b94902d6d0a77b0 - category: main - optional: false -- name: xcb-util-image - version: 0.4.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libxcb: '>=1.15,<1.16.0a0' - xcb-util: '>=0.4.0,<0.5.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda - hash: - md5: 9d7bcddf49cbf727730af10e71022c73 - sha256: 92ffd68d2801dbc27afe223e04ae7e78ef605fc8575f107113c93c7bafbd15b0 - category: main - optional: false -- name: xkeyboard-config - version: '2.40' - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - xorg-libx11: '>=1.8.6,<2.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.40-hd590300_0.conda - hash: - md5: 07c15d846a2e4d673da22cbd85fdb6d2 - sha256: a01fcb9c3346ee08aa24b3900a08896f2e8f80c891378a57d71764e16bbd6141 - category: 
main - optional: false -- name: xorg-libxext - version: 1.3.4 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - xorg-libx11: '>=1.7.2,<2.0a0' - xorg-xextproto: '' - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda - hash: - md5: 82b6df12252e6f32402b96dacc656fec - sha256: 73e5cfbdff41ef8a844441f884412aa5a585a0f0632ec901da035a03e1fe1249 - category: main - optional: false -- name: xorg-libxfixes - version: 5.0.3 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=9.3.0' - xorg-fixesproto: '' - xorg-libx11: '>=1.7.0,<2.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-5.0.3-h7f98852_1004.tar.bz2 - hash: - md5: e9a21aa4d5e3e5f1aed71e8cefd46b6a - sha256: 1e426a1abb774ef1dcf741945ed5c42ad12ea2dc7aeed7682d293879c3e1e4c3 - category: main - optional: false -- name: xorg-libxrender - version: 0.9.11 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - xorg-libx11: '>=1.8.6,<2.0a0' - xorg-renderproto: '' - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda - hash: - md5: ed67c36f215b310412b2af935bf3e530 - sha256: 26da4d1911473c965c32ce2b4ff7572349719eaacb88a066db8d968a4132c3f7 - category: main - optional: false -- name: xorg-libxt - version: 1.3.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - xorg-kbproto: '' - xorg-libice: '>=1.1.1,<2.0a0' - xorg-libsm: '>=1.2.4,<2.0a0' - xorg-libx11: '>=1.8.6,<2.0a0' - xorg-xproto: '' - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxt-1.3.0-hd590300_1.conda - hash: - md5: ae92aab42726eb29d16488924f7312cb - sha256: e7648d1efe2e858c4bc63ccf4a637c841dc971b37ded85a01be97a5e240fecfa - category: main - optional: false -- name: zipp - version: 3.17.0 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda - hash: - md5: 2e4d6bc0b14e10f895fc6791a7d9b26a - sha256: bced1423fdbf77bca0a735187d05d9b9812d2163f60ab426fc10f11f92ecbe26 - category: main - optional: false -- name: aiosignal - version: 1.3.1 - manager: conda - platform: linux-64 - dependencies: - frozenlist: '>=1.1.0' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.3.1-pyhd8ed1ab_0.tar.bz2 - hash: - md5: d1e1eb7e21a9e2c74279d87dafb68156 - sha256: 575c742e14c86575986dc867463582a970463da50b77264cdf54df74f5563783 - category: main - optional: false -- name: babel - version: 2.13.0 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.7' - pytz: '' - setuptools: '' - url: https://conda.anaconda.org/conda-forge/noarch/babel-2.13.0-pyhd8ed1ab_0.conda - hash: - md5: 22541af7a9eb59fc6afcadb7ecdf9219 - sha256: 25b0a72c9d35319307a9714b05aa5c18b5c82f8c8e7bece65778202c6b8ad2a7 - category: main - optional: false -- name: cached-property - version: 1.5.2 - manager: conda - platform: linux-64 - dependencies: - cached_property: '>=1.5.2,<1.5.3.0a0' - url: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 - hash: - md5: 9b347a7ec10940d3f7941ff6c460b551 - sha256: 561e6660f26c35d137ee150187d89767c988413c978e1b712d53f27ddf70ea17 - category: main - optional: false -- name: cairo - version: 1.18.0 - manager: conda - platform: linux-64 - dependencies: - fontconfig: '>=2.14.2,<3.0a0' - fonts-conda-ecosystem: '' - freetype: '>=2.12.1,<3.0a0' - icu: '>=73.2,<74.0a0' - libgcc-ng: '>=12' - libglib: '>=2.78.0,<3.0a0' - libpng: '>=1.6.39,<1.7.0a0' - 
libstdcxx-ng: '>=12' - libxcb: '>=1.15,<1.16.0a0' - libzlib: '>=1.2.13,<1.3.0a0' - pixman: '>=0.42.2,<1.0a0' - xorg-libice: '>=1.1.1,<2.0a0' - xorg-libsm: '>=1.2.4,<2.0a0' - xorg-libx11: '>=1.8.6,<2.0a0' - xorg-libxext: '>=1.3.4,<2.0a0' - xorg-libxrender: '>=0.9.11,<0.10.0a0' - zlib: '' - url: https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda - hash: - md5: f907bb958910dc404647326ca80c263e - sha256: 142e2639a5bc0e99c44d76f4cc8dce9c6a2d87330c4beeabb128832cd871a86e - category: main - optional: false -- name: cffi - version: 1.16.0 - manager: conda - platform: linux-64 - dependencies: - libffi: '>=3.4,<4.0a0' - libgcc-ng: '>=12' - pycparser: '' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - url: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py310h2fee648_0.conda - hash: - md5: 45846a970e71ac98fd327da5d40a0a2c - sha256: 007e7f69ab45553b7bf11f2c1b8d3f3a13fd42997266a0d57795f41c7d38df36 - category: main - optional: false -- name: coverage - version: 7.3.2 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - tomli: '' - url: https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.2-py310h2372a71_0.conda - hash: - md5: 33c03cd5711885c920ddff676fb84f98 - sha256: f9c07ee8807188c39bd415dd8ce39ac7a90c41cb0cc741e9af429e1f886930c6 - category: main - optional: false -- name: fonttools - version: 4.43.1 - manager: conda - platform: linux-64 - dependencies: - brotli: '' - libgcc-ng: '>=12' - munkres: '' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - unicodedata2: '>=14.0.0' - url: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.43.1-py310h2372a71_0.conda - hash: - md5: c7d552c32b87beb736c9658441bf93a9 - sha256: 66f89ff0c0e6cd9940e866b04f5442b4ab802d5d279012c5eb13c639cf18da76 - category: main - optional: false -- name: glib - version: 2.78.0 - manager: conda - platform: linux-64 - dependencies: - gettext: '>=0.21.1,<1.0a0' - glib-tools: 2.78.0 - libgcc-ng: '>=12' - libglib: 2.78.0 - libstdcxx-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - python: '*' - url: https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.0-hfc55251_0.conda - hash: - md5: 2f55a36b549f51a7e0c2b1e3c3f0ccd4 - sha256: b7fd5ef9aee4205e14105dc9f79b3de326af091c0253e1e52d3e4ee0d960851d - category: main - optional: false -- name: hdf5 - version: 1.14.2 - manager: conda - platform: linux-64 - dependencies: - libaec: '>=1.0.6,<2.0a0' - libcurl: '>=8.2.1,<9.0a0' - libgcc-ng: '>=12' - libgfortran-ng: '' - libgfortran5: '>=12.3.0' - libstdcxx-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - openmpi: '>=4.1.5,<5.0a0' - openssl: '>=3.1.2,<4.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-mpi_openmpi_h327c9cf_0.conda - hash: - md5: e69f4f8452059bf0eacd91422077c090 - sha256: 9bdba8976cb1904557403d083f6521a1014554275bae1f9f7a7e695fee02a52f - category: main - optional: false -- name: importlib-metadata - version: 6.8.0 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.8' - zipp: '>=0.5' - url: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda - hash: - md5: 4e9f59a060c3be52bc4ddc46ee9b6946 - sha256: 2797ed927d65324309b6c630190d917b9f2111e0c217b721f80429aeb57f9fcf - category: main - optional: false -- name: importlib_resources - version: 6.1.0 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.8' - zipp: '>=3.1.0' - url: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.1.0-pyhd8ed1ab_0.conda - hash: - 
md5: 48b0d98e0c0ec810d3ccc2a0926c8c0e - sha256: adab6da633ec3b642f036ab5c1196c3e2db0e8db57fb0c7fc9a8e06e29fa9bdc - category: main - optional: false -- name: isort - version: 5.12.0 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.8,<4.0' - setuptools: '' - url: https://conda.anaconda.org/conda-forge/noarch/isort-5.12.0-pyhd8ed1ab_1.conda - hash: - md5: 07ed3421bad60867234c7a9282ea39d4 - sha256: d34a62e33ac7acc8fd3167ceb0e2aee4e7974b94de263f52d752716429d95bcb - category: main - optional: false -- name: jinja2 - version: 3.1.2 - manager: conda - platform: linux-64 - dependencies: - markupsafe: '>=2.0' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2 - hash: - md5: c8490ed5c70966d232fdd389d0dbed37 - sha256: b045faba7130ab263db6a8fdc96b1a3de5fcf85c4a607c5f11a49e76851500b5 - category: main - optional: false -- name: kahip-python - version: '3.15' - manager: conda - platform: linux-64 - dependencies: - kahip: '3.15' - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - openmpi: '>=4.1.5,<5.0a0' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - url: https://conda.anaconda.org/conda-forge/linux-64/kahip-python-3.15-py310h3448afa_2.conda - hash: - md5: a981124f70c638d74ae49ff4da2db830 - sha256: f1e9caeed46af06201f232bab19eeac14125ef5baef8cbba56f02c486153067e - category: main - optional: false -- name: libcblas - version: 3.9.0 - manager: conda - platform: linux-64 - dependencies: - libblas: 3.9.0 - url: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-19_linux64_openblas.conda - hash: - md5: d12374af44575413fbbd4a217d46ea33 - sha256: 84fddccaf58f42b07af7fb42512bd617efcb072f17bdef27f4c1884dbd33c86a - category: main - optional: false -- name: libclang - version: 15.0.7 - manager: conda - platform: linux-64 - dependencies: - libclang13: 15.0.7 - libgcc-ng: '>=12' - libllvm15: '>=15.0.7,<15.1.0a0' - libstdcxx-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_h7634d5b_3.conda - hash: - md5: 0922208521c0463e690bbaebba7eb551 - sha256: c2b0c8dd675e30d86bad410679f258820bc36723fbadffc13c2f60249be91815 - category: main - optional: false -- name: libglu - version: 9.0.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - libxcb: '>=1.15,<1.16.0a0' - xorg-libx11: '>=1.8.6,<2.0a0' - xorg-libxext: '>=1.3.4,<2.0a0' - xorg-xextproto: '>=7.3.0,<8.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.0-hac7e632_1003.conda - hash: - md5: 50c389a09b6b7babaef531eb7cb5e0ca - sha256: 8368435c41105dc3e1c02896a02ecaa21b77d0b0d67fc8b06a16ba885c86f917 - category: main - optional: false -- name: liblapack - version: 3.9.0 - manager: conda - platform: linux-64 - dependencies: - libblas: 3.9.0 - url: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-19_linux64_openblas.conda - hash: - md5: 9f100edf65436e3eabc2a51fc00b2c37 - sha256: 58f402aae605ebd0932e1cbbf855cd49dcdfa2fcb6aab790a4f6068ec5937878 - category: main - optional: false -- name: libraw - version: 0.21.1 - manager: conda - platform: linux-64 - dependencies: - _openmp_mutex: '>=4.5' - lcms2: '>=2.15,<3.0a0' - libgcc-ng: '>=12' - libjpeg-turbo: '>=2.1.5.1,<3.0a0' - libstdcxx-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libraw-0.21.1-h501b40f_1.conda - hash: - md5: 7ec21ba0b5095601a90ff5e32359a265 - sha256: 5f7ef7aedfa1f0c2967bdcba1664279e0166851817ab96a969d7970548909a5f - category: main - 
optional: false -- name: libva - version: 2.20.0 - manager: conda - platform: linux-64 - dependencies: - libdrm: '>=2.4.114,<2.5.0a0' - libgcc-ng: '>=12' - xorg-libx11: '>=1.8.6,<2.0a0' - xorg-libxext: '>=1.3.4,<2.0a0' - xorg-libxfixes: '' - url: https://conda.anaconda.org/conda-forge/linux-64/libva-2.20.0-hd590300_0.conda - hash: - md5: 933bcea637569c6cea6084957028cb53 - sha256: 972d6f67d854d0f0fc2593f8bddc8d411859437ace7248c374e1a85a9ea9d410 - category: main - optional: false -- name: libxkbcommon - version: 1.6.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - libxcb: '>=1.15,<1.16.0a0' - libxml2: '>=2.11.5,<2.12.0a0' - xkeyboard-config: '' - xorg-libxau: '>=1.0.11,<2.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.6.0-h5d7e998_0.conda - hash: - md5: d8edd0e29db6fb6b6988e1a28d35d994 - sha256: 6cd22602fe1517af411cfbf65babf1d6aad276100c2bce90d5e316214a602bbb - category: main - optional: false -- name: markdown-it-py - version: 3.0.0 - manager: conda - platform: linux-64 - dependencies: - mdurl: '>=0.1,<1' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_0.conda - hash: - md5: 93a8e71256479c62074356ef6ebf501b - sha256: c041b0eaf7a6af3344d5dd452815cdc148d6284fec25a4fa3f4263b3a021e962 - category: main - optional: false -- name: pillow - version: 10.0.1 - manager: conda - platform: linux-64 - dependencies: - freetype: '>=2.12.1,<3.0a0' - lcms2: '>=2.15,<3.0a0' - libgcc-ng: '>=12' - libjpeg-turbo: '>=2.1.5.1,<3.0a0' - libtiff: '>=4.6.0,<4.7.0a0' - libwebp-base: '>=1.3.2,<2.0a0' - libxcb: '>=1.15,<1.16.0a0' - libzlib: '>=1.2.13,<1.3.0a0' - openjpeg: '>=2.5.0,<3.0a0' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - tk: '>=8.6.12,<8.7.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.1-py310h29da1c1_1.conda - hash: - md5: 8e93b1c69cddf89fd412178d3d418bae - sha256: 4c18593b1b90299e0f1f7a279ccce6dbe0aba694758ee039c0850e0119d3b3e8 - category: main - optional: false -- name: pint - version: '0.22' - manager: conda - platform: linux-64 - dependencies: - python: '>=3.9' - typing_extensions: '' - url: https://conda.anaconda.org/conda-forge/noarch/pint-0.22-pyhd8ed1ab_1.conda - hash: - md5: a719c3f3959c529e558e9ed9f98c3f30 - sha256: 49795ff6e5e634523aafe34e869c425e2cdc4a1fcb11aa294d7983035bc38622 - category: main - optional: false -- name: pip - version: 23.3.1 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.7' - setuptools: '' - wheel: '' - url: https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda - hash: - md5: 2400c0b86889f43aa52067161e1fb108 - sha256: 435829a03e1c6009f013f29bb83de8b876c388820bf8cf69a7baeec25f6a3563 - category: main - optional: false -- name: proj - version: 9.3.0 - manager: conda - platform: linux-64 - dependencies: - libcurl: '>=8.4.0,<9.0a0' - libgcc-ng: '>=12' - libsqlite: '>=3.43.2,<4.0a0' - libstdcxx-ng: '>=12' - libtiff: '>=4.6.0,<4.7.0a0' - sqlite: '' - url: https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_2.conda - hash: - md5: b5e57a0c643da391bef850922963eece - sha256: 252f6c31101719e3d524679e69ae81e6323b93b143e1360169bf50e89386bf24 - category: main - optional: false -- name: pulseaudio-client - version: '16.1' - manager: conda - platform: linux-64 - dependencies: - dbus: '>=1.13.6,<2.0a0' - libgcc-ng: '>=12' - libglib: '>=2.76.4,<3.0a0' - libsndfile: '>=1.2.2,<1.3.0a0' - libsystemd0: '>=254' - url: 
https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_5.conda - hash: - md5: ac902ff3c1c6d750dd0dfc93a974ab74 - sha256: 9981c70893d95c8cac02e7edd1a9af87f2c8745b772d529f08b7f9dafbe98606 - category: main - optional: false -- name: pytest - version: 7.4.2 - manager: conda - platform: linux-64 - dependencies: - colorama: '' - exceptiongroup: '>=1.0.0rc8' - iniconfig: '' - packaging: '' - pluggy: '>=0.12,<2.0' - python: '>=3.7' - tomli: '>=1.0.0' - url: https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.2-pyhd8ed1ab_0.conda - hash: - md5: 6dd662ff5ac9a783e5c940ce9f3fe649 - sha256: 150bfb2a86dffd4ce1e91c2d61dde5779fb3ee338675e210fec4ef508ffff28c - category: main - optional: false -- name: python-dateutil - version: 2.8.2 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.6' - six: '>=1.5' - url: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2 - hash: - md5: dd999d1cc9f79e67dbb855c8924c7984 - sha256: 54d7785c7678166aa45adeaccfc1d2b8c3c799ca2dc05d4a82bb39b1968bd7da - category: main - optional: false -- name: referencing - version: 0.30.2 - manager: conda - platform: linux-64 - dependencies: - attrs: '>=22.2.0' - python: '>=3.8' - rpds-py: '>=0.7.0' - url: https://conda.anaconda.org/conda-forge/noarch/referencing-0.30.2-pyhd8ed1ab_0.conda - hash: - md5: a33161b983172ba6ef69d5fc850650cd - sha256: a6768fabc12f1eed87fec68c5c65439e908655cded1e458d70a164abbce13287 - category: main - optional: false -- name: sip - version: 6.7.12 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - packaging: '' - ply: '' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - tomli: '' - url: https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.12-py310hc6cd4ac_0.conda - hash: - md5: 68d5bfccaba2d89a7812098dd3966d9b - sha256: 4c350a7ed9f5fd98196a50bc74ce1dc3bb05b0c90d17ea120439755fe2075796 - category: main - optional: false -- name: tbb-devel - version: 2021.10.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - tbb: 2021.10.0 - url: https://conda.anaconda.org/conda-forge/linux-64/tbb-devel-2021.10.0-h00ab1b0_2.conda - hash: - md5: 35cc01dda6cac3d1c0c6dda6210f8bab - sha256: 559b4b5fe4c8a82dcec084193fc05666f431f6b2b9bc001d3d82f67c0eb03728 - category: main - optional: false -- name: typing-extensions - version: 4.8.0 - manager: conda - platform: linux-64 - dependencies: - typing_extensions: 4.8.0 - url: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda - hash: - md5: 384462e63262a527bda564fa2d9126c0 - sha256: d6e1dddd0c372218ef15912383d351ac8c73465cbf16238017f0269813cafe2d - category: main - optional: false -- name: urllib3 - version: 2.0.7 - manager: conda - platform: linux-64 - dependencies: - brotli-python: '>=1.0.9' - pysocks: '>=1.5.6,<2.0,!=1.5.7' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.7-pyhd8ed1ab_0.conda - hash: - md5: 270e71c14d37074b1d066ee21cf0c4a6 - sha256: 9fe14735dde74278c6f1710cbe883d5710fc98501a96031dec6849a8d8a1bb11 - category: main - optional: false -- name: xorg-libxmu - version: 1.1.3 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=9.3.0' - xorg-libx11: '>=1.7.0,<2.0a0' - xorg-libxext: 1.3.* - xorg-libxt: '>=1.2.1,<2.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxmu-1.1.3-h7f98852_0.tar.bz2 - hash: - md5: 3cdb89236358326adfce12be820a8af3 - sha256: 
3a9f9f8bbf3a6934dada98a7a224dd264c533a251d2a92be604a4b23e772e79b - category: main - optional: false -- name: yarl - version: 1.9.2 - manager: conda - platform: linux-64 - dependencies: - idna: '>=2.0' - libgcc-ng: '>=12' - multidict: '>=4.0' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - url: https://conda.anaconda.org/conda-forge/linux-64/yarl-1.9.2-py310h2372a71_1.conda - hash: - md5: 30ae8a8f248b4e7cd2622cff41cb05a7 - sha256: 0a9aeb8cf885ef6dd0a737693823a4e4d27b2ee724fa3af317d8ccd925fa4258 - category: main - optional: false -- name: async-timeout - version: 4.0.3 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.7' - typing-extensions: '>=3.6.5' - url: https://conda.anaconda.org/conda-forge/noarch/async-timeout-4.0.3-pyhd8ed1ab_0.conda - hash: - md5: 3ce482ec3066e6d809dbbb1d1679f215 - sha256: bd8b698e7f037a9c6107216646f1191f4f7a7fc6da6c34d1a6d4c211bcca8979 - category: main - optional: false -- name: fenics-libbasix - version: 0.6.0 - manager: conda - platform: linux-64 - dependencies: - libblas: '>=3.9.0,<4.0a0' - libcblas: '>=3.9.0,<4.0a0' - libgcc-ng: '>=12' - liblapack: '>=3.9.0,<4.0a0' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/fenics-libbasix-0.6.0-hfdc072b_5.conda - hash: - md5: 1cb6934c5efd89a4e70689bbea874902 - sha256: 02ea6089b619322083743393e103e41e4c6dcff9084e76e7061e671b376b7ffc - category: main - optional: false -- name: fltk - version: 1.3.8 - manager: conda - platform: linux-64 - dependencies: - freetype: '>=2.12.1,<3.0a0' - libgcc-ng: '>=12' - libglu: '' - libjpeg-turbo: '>=2.1.5.1,<3.0a0' - libpng: '>=1.6.39,<1.7.0a0' - libstdcxx-ng: '>=12' - libxcb: '>=1.15,<1.16.0a0' - libzlib: '>=1.2.13,<1.3.0a0' - xorg-libice: '>=1.1.1,<2.0a0' - xorg-libsm: '>=1.2.4,<2.0a0' - xorg-libx11: '>=1.8.6,<2.0a0' - xorg-libxau: '>=1.0.11,<2.0a0' - xorg-libxdmcp: '' - xorg-libxext: '>=1.3.4,<2.0a0' - xorg-libxfixes: '' - xorg-libxrender: '>=0.9.11,<0.10.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/fltk-1.3.8-hfbcda93_2.conda - hash: - md5: 22ac38d068375b592df4eebabc15a4d5 - sha256: 34b510f05839b02aed19ef789fc0d87ea7917f2f8a5fb632ce26e1afd2b23c3a - category: main - optional: false -- name: freeimage - version: 3.18.0 - manager: conda - platform: linux-64 - dependencies: - imath: '>=3.1.9,<3.1.10.0a0' - jxrlib: '>=1.1,<1.2.0a0' - libgcc-ng: '>=12' - libjpeg-turbo: '>=2.1.5.1,<3.0a0' - libpng: '>=1.6.39,<1.7.0a0' - libraw: '>=0.21.1,<0.22.0a0' - libstdcxx-ng: '>=12' - libtiff: '>=4.6.0,<4.7.0a0' - libwebp-base: '>=1.3.2,<2.0a0' - libzlib: '>=1.2.13,<1.3.0a0' - openexr: '>=3.2.1,<3.3.0a0' - openjpeg: '>=2.5.0,<3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/freeimage-3.18.0-h138f111_17.conda - hash: - md5: 01778fd100b523cc9e12b1292f8d8f12 - sha256: 6bb076953cafc6f38b871b768c814aa38197ffdcf702bfc2ca18b942d2c3754f - category: main - optional: false -- name: glew - version: 2.1.0 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=9.3.0' - libglu: '' - libstdcxx-ng: '>=9.3.0' - xorg-libx11: '' - xorg-libxext: '' - url: https://conda.anaconda.org/conda-forge/linux-64/glew-2.1.0-h9c3ff4c_2.tar.bz2 - hash: - md5: fb05eb5c47590b247658243d27fc32f1 - sha256: 86f5484e38f4604f7694b14f64238e932e8fd8d7364e86557f4911eded2843ae - category: main - optional: false -- name: gstreamer - version: 1.22.6 - manager: conda - platform: linux-64 - dependencies: - __glibc: '>=2.17,<3.0.a0' - gettext: '>=0.21.1,<1.0a0' - glib: '>=2.78.0,<3.0a0' - libgcc-ng: '>=12' - libglib: '>=2.78.0,<3.0a0' - libiconv: 
'>=1.17,<2.0a0' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.6-h98fc4e7_2.conda - hash: - md5: 1c95f7c612f9121353c4ef764678113e - sha256: 5578119cec4e86b7b607678781026ebe1170cb851b4f784c49b09bed1c92566c - category: main - optional: false -- name: harfbuzz - version: 8.2.1 - manager: conda - platform: linux-64 - dependencies: - cairo: '>=1.16.0,<2.0a0' - freetype: '>=2.12.1,<3.0a0' - graphite2: '' - icu: '>=73.2,<74.0a0' - libgcc-ng: '>=12' - libglib: '>=2.78.0,<3.0a0' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.2.1-h3d44ed6_0.conda - hash: - md5: 98db5f8813f45e2b29766aff0e4a499c - sha256: 5ca6585e6a4348bcbe214d57f5d6f560d15d23a6650770a2909475848b214edb - category: main - optional: false -- name: hypre - version: 2.28.0 - manager: conda - platform: linux-64 - dependencies: - libblas: '>=3.9.0,<4.0a0' - libgcc-ng: '>=12' - liblapack: '>=3.9.0,<4.0a0' - libstdcxx-ng: '>=12' - openmpi: '>=4.1.5,<5.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/hypre-2.28.0-mpi_openmpi_h4032107_0.conda - hash: - md5: f77f78f41ee21adbcd1b9c8acb8047e2 - sha256: b2e8f249f11c13861f9f4cf008757414306166ccd26e0426238796f6d329289b - category: main - optional: false -- name: importlib_metadata - version: 6.8.0 - manager: conda - platform: linux-64 - dependencies: - importlib-metadata: '>=6.8.0,<6.8.1.0a0' - url: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda - hash: - md5: b279b07ce18058034e5b3606ba103a8b - sha256: b96e01dc42d547d6d9ceb1c5b52a5232cc04e40153534350f702c3e0418a6b3f - category: main - optional: false -- name: jsonschema-specifications - version: 2023.7.1 - manager: conda - platform: linux-64 - dependencies: - importlib_resources: '>=1.4.0' - python: '>=3.8' - referencing: '>=0.25.0' - url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2023.7.1-pyhd8ed1ab_0.conda - hash: - md5: 7c27ea1bdbe520bb830dcadd59f55cbf - sha256: 7b0061e106674f27cc718f79a095e90a5667a3635ec6626dd23b3be0fd2bfbdc - category: main - optional: false -- name: libadios2 - version: 2.9.1 - manager: conda - platform: linux-64 - dependencies: - blosc: '>=1.21.5,<2.0a0' - bzip2: '>=1.0.8,<2.0a0' - hdf5: '>=1.14.2,<1.14.3.0a0' - libffi: '>=3.4,<4.0a0' - libgcc-ng: '>=12' - libgfortran-ng: '' - libgfortran5: '>=12.3.0' - libpng: '>=1.6.39,<1.7.0a0' - libstdcxx-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - openmpi: '>=4.1.6,<5.0a0' - zeromq: '>=4.3.5,<4.4.0a0' - zfp: '>=0.5.5,<1.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libadios2-2.9.1-mpi_openmpi_h4e62573_4.conda - hash: - md5: cc2ffb05135c6185b3263badc7ffea80 - sha256: c9e495eda85fdb62189e91b0fd80332a1e4ed70ddfff283e0bbd4ea347f37cc3 - category: main - optional: false -- name: libnetcdf - version: 4.9.2 - manager: conda - platform: linux-64 - dependencies: - blosc: '>=1.21.4,<2.0a0' - bzip2: '>=1.0.8,<2.0a0' - hdf4: '>=4.2.15,<4.2.16.0a0' - hdf5: '>=1.14.2,<1.14.3.0a0' - libaec: '>=1.0.6,<2.0a0' - libcurl: '>=8.2.1,<9.0a0' - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - libxml2: '>=2.11.5,<2.12.0a0' - libzip: '>=1.10.1,<2.0a0' - libzlib: '>=1.2.13,<1.3.0a0' - openssl: '>=3.1.2,<4.0a0' - zlib: '' - zstd: '>=1.5.5,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda - hash: - md5: a19fa6cacf80c8a366572853d5890eb4 - sha256: 305ffc3ecaffce10754e4d057daa9803e8dc86d68b14524a791c7dc5598c1d2f - category: main - optional: false -- name: mdit-py-plugins - version: 0.4.0 - 
manager: conda - platform: linux-64 - dependencies: - markdown-it-py: '>=1.0.0,<4.0.0' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/mdit-py-plugins-0.4.0-pyhd8ed1ab_0.conda - hash: - md5: 6c5358a10873a15398b6f15f60cb5e1f - sha256: 1ddac8d2be448cd1fbe49d2ca09df7e10d99679d53146a917f8bb4899f76d0ca - category: main - optional: false -- name: numpy - version: 1.26.0 +- name: _openmp_mutex + version: '4.5' manager: conda platform: linux-64 dependencies: - libblas: '>=3.9.0,<4.0a0' - libcblas: '>=3.9.0,<4.0a0' - libgcc-ng: '>=12' - liblapack: '>=3.9.0,<4.0a0' - libstdcxx-ng: '>=12' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - url: https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.0-py310hb13e2d6_0.conda + _libgcc_mutex: '0.1' + libgomp: '>=7.5.0' + url: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 hash: - md5: ac3b67e928cc71548efad9b522d42fef - sha256: d4671e365c2ed30bf8a376bdc65afcbeeae440ca2091c8712ff8f23678f64973 + md5: 73aaf86a425cc6e73fcf236a5a46396d + sha256: fbe2c5e56a653bebb982eda4876a9178aedfc2b545f25d0ce9c4c0b508253d22 category: main optional: false -- name: platformdirs - version: 3.11.0 +- name: annotated-types + version: 0.7.0 manager: conda platform: linux-64 dependencies: python: '>=3.7' - typing-extensions: '>=4.6.3' - url: https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.11.0-pyhd8ed1ab_0.conda + typing-extensions: '>=4.0.0' + url: https://conda.anaconda.org/conda-forge/noarch/annotated-types-0.7.0-pyhd8ed1ab_0.conda hash: - md5: 8f567c0a74aa44cf732f15773b4083b0 - sha256: b3d809ff5a18ee8514bba8bc05a23b4cdf1758090a18a2cf742af38aed405144 + md5: 7e9f4612544c8edbfd6afad17f1bd045 + sha256: 668f0825b6c18e4012ca24a0070562b6ec801ebc7008228a428eb52b4038873f category: main optional: false -- name: pyqt5-sip - version: 12.12.2 +- name: bzip2 + version: 1.0.8 manager: conda platform: linux-64 dependencies: + __glibc: '>=2.17,<3.0.a0' libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - packaging: '' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - sip: '' - toml: '' - url: https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py310hc6cd4ac_5.conda + url: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda hash: - md5: ef5333594a958b25912002886b82b253 - sha256: a6aec078683ed3cf1650b7c47e3f0fe185015d54ea37fe76b9f31f05e1fd087d + md5: 62ee74e96c5ebb0af99386de58cf9553 + sha256: 5ced96500d945fb286c9c838e54fa759aa04a7129c59800f0846b4335cee770d category: main optional: false -- name: requests - version: 2.31.0 +- name: ca-certificates + version: 2024.8.30 manager: conda platform: linux-64 - dependencies: - certifi: '>=2017.4.17' - charset-normalizer: '>=2,<4' - idna: '>=2.5,<4' - python: '>=3.7' - urllib3: '>=1.21.1,<3' - url: https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda + dependencies: {} + url: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.8.30-hbcca054_0.conda hash: - md5: a30144e4156cdbb236f99ebb49828f8b - sha256: 9f629d6fd3c8ac5f2a198639fe7af87c4db2ac9235279164bfe0fcb49d8c4bad + md5: c27d1c142233b5bc9ca570c6e2e0c244 + sha256: afee721baa6d988e27fef1832f68d6f32ac8cc99cdf6015732224c2841a09cea category: main optional: false -- name: rich - version: 13.6.0 +- name: ld_impl_linux-64 + version: '2.43' manager: conda platform: linux-64 dependencies: - markdown-it-py: '>=2.2.0' - pygments: '>=2.13.0,<3.0.0' - python: '>=3.7.0' - typing_extensions: '>=4.0.0,<5.0.0' - url: 
https://conda.anaconda.org/conda-forge/noarch/rich-13.6.0-pyhd8ed1ab_0.conda + __glibc: '>=2.17,<3.0.a0' + url: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.43-h712a8e2_0.conda hash: - md5: 3ca4829f40710f581ca1d76bc907e99f - sha256: a2f8838a75ab8c2c1da0a813c7569d4f6efba0d2b5dc3a7659e2cb6d96bd8e19 + md5: 588394be268105cd4e016f49550344c6 + sha256: ba72c23a29594aff1d743d51dd2a81fca85ff61f66c5e64bb43ee38e4cad90a5 category: main optional: false -- name: scalapack - version: 2.2.0 +- name: libblas + version: 3.9.0 manager: conda platform: linux-64 dependencies: - libblas: '>=3.8.0,<4.0a0' - libgcc-ng: '>=10.3.0' - libgfortran-ng: '' - libgfortran5: '>=10.3.0' - liblapack: '>=3.8.0,<4.0a0' - openmpi: '>=4.1.2,<5.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/scalapack-2.2.0-h67de57e_1.tar.bz2 + libopenblas: '>=0.3.27,<1.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-24_linux64_openblas.conda hash: - md5: 9f5631123fb242b76de37a6f0f2804cf - sha256: 84d4994cf823a8226b9bb4191a2f8a8111f5b1d561f06de0b0c22643362953d2 + md5: 80aea6603a6813b16ec119d00382b772 + sha256: 3097f7913bda527d4fe9f824182b314e130044e582455037fca6f4e97965d83c category: main optional: false -- name: suitesparse - version: 5.10.1 +- name: libcblas + version: 3.9.0 manager: conda platform: linux-64 dependencies: - libblas: '>=3.8.0,<4.0a0' - libcblas: '>=3.8.0,<4.0a0' - libgcc-ng: '>=9.4.0' - liblapack: '>=3.8.0,<4.0a0' - libstdcxx-ng: '>=9.4.0' - metis: '>=5.1.0,<5.1.1.0a0' - mpfr: '>=4.1.0,<5.0a0' - tbb: '>=2021.3.0' - url: https://conda.anaconda.org/conda-forge/linux-64/suitesparse-5.10.1-h9e50725_1.tar.bz2 + libblas: 3.9.0 + url: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-24_linux64_openblas.conda hash: - md5: a3a685b5f9aeb890ed874502fe56accf - sha256: 176d004eafe3f07110315d1c96ab7245fbba8677364933213404890a0e2e9d1f + md5: f5b8822297c9c790cec0795ca1fc9be6 + sha256: 2a52bccc5b03cdf014d856d0b85dbd591faa335ab337d620cd6aded121d7153c category: main optional: false -- name: superlu - version: 5.2.2 +- name: libexpat + version: 2.6.3 manager: conda platform: linux-64 dependencies: - libblas: '>=3.9.0,<4.0a0' - libcblas: '>=3.9.0,<4.0a0' - libgcc-ng: '>=12' - libgfortran-ng: '' - libgfortran5: '>=10.3.0' - url: https://conda.anaconda.org/conda-forge/linux-64/superlu-5.2.2-h00795ac_0.tar.bz2 + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + url: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.3-h5888daf_0.conda hash: - md5: 2fe6fcc1c7d6e2e8ea3f16ebd3306dbe - sha256: dd80a0f64309849d0a222da3e2edbb28de741202f8d0578ccca705e1ca16dabd + md5: 59f4c43bb1b5ef1c71946ff2cbf59524 + sha256: 4bb47bb2cd09898737a5211e2992d63c555d63715a07ba56eae0aff31fb89c22 category: main optional: false -- name: superlu_dist - version: 7.2.0 +- name: libffi + version: 3.4.2 manager: conda platform: linux-64 dependencies: - _openmp_mutex: '>=4.5' - libblas: '>=3.8.0,<4.0a0' libgcc-ng: '>=9.4.0' - libgfortran-ng: '' - libgfortran5: '>=9.4.0' - liblapack: '>=3.8.0,<4.0a0' - libstdcxx-ng: '>=9.4.0' - metis: '>=5.1.0,<5.1.1.0a0' - openmpi: '>=4.1.2,<5.0a0' - parmetis: '>=4.0.3,<4.1.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/superlu_dist-7.2.0-h34f6f4d_0.tar.bz2 + url: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 hash: - md5: 096b645a660e2d764771e73f732b6874 - sha256: abaaac9e2b27a0fd6e9efd02daf8a4bf96d4b8ae7946d35b68a7399e2191817f + md5: d645c6d2ac96843a2bfaccd2d62b3ac3 + sha256: 
ab6e9856c21709b7b517e940ae7028ae0737546122f83c2aa5d692860c3b149e category: main optional: false -- name: aiohttp - version: 3.8.6 +- name: libgcc + version: 14.1.0 manager: conda platform: linux-64 dependencies: - aiosignal: '>=1.1.2' - async-timeout: <5.0,>=4.0.0a3 - attrs: '>=17.3.0' - charset-normalizer: '>=2.0,<4.0' - frozenlist: '>=1.1.1' - libgcc-ng: '>=12' - multidict: '>=4.5,<7.0' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - yarl: '>=1.0,<2.0' - url: https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.8.6-py310h2372a71_1.conda + _libgcc_mutex: '0.1' + _openmp_mutex: '>=4.5' + url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.1.0-h77fa898_1.conda hash: - md5: d265a71480afd9479c9333ba86375d04 - sha256: e32892fd786dc4ba150701ffd0981c8e942fc77e52754f6f1c331392004bd6f1 + md5: 002ef4463dd1e2b44a94a4ace468f5d2 + sha256: 10fa74b69266a2be7b96db881e18fa62cfa03082b65231e8d652e897c4b335a3 category: main optional: false -- name: black - version: 23.10.0 +- name: libgcc-ng + version: 14.1.0 manager: conda platform: linux-64 dependencies: - click: '>=8.0.0' - mypy_extensions: '>=0.4.3' - packaging: '>=22.0' - pathspec: '>=0.9' - platformdirs: '>=2' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - tomli: '>=1.1.0' - typing_extensions: '>=4.0.1' - url: https://conda.anaconda.org/conda-forge/linux-64/black-23.10.0-py310hff52083_0.conda + libgcc: 14.1.0 + url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h69a702a_1.conda hash: - md5: 05a18fabc4d4c323d612aea919b295ff - sha256: 85504a132f80ecc82ffeb9532151d003eb75a765536b829a4ec3be51cb3fe17e + md5: 1efc0ad219877a73ef977af7dbb51f17 + sha256: b91f7021e14c3d5c840fbf0dc75370d6e1f7c7ff4482220940eaafb9c64613b7 category: main optional: false -- name: cftime - version: 1.6.2 +- name: libgfortran + version: 14.1.0 manager: conda platform: linux-64 dependencies: - libgcc-ng: '>=12' - numpy: '>=1.22.4,<2.0a0' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - url: https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py310h1f7b6fc_2.conda + libgfortran5: 14.1.0 + url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.1.0-h69a702a_1.conda hash: - md5: 7925aaa4330045bc32d334b20f446902 - sha256: 182a5e5584167a51625617775a2c641784985a5e769e74e3dd445bd6f1c0e7e1 + md5: 591e631bc1ae62c64f2ab4f66178c097 + sha256: ed77f04f873e43a26e24d443dd090631eedc7d0ace3141baaefd96a123e47535 category: main optional: false -- name: contourpy - version: 1.1.1 +- name: libgfortran-ng + version: 14.1.0 manager: conda platform: linux-64 dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - numpy: '>=1.16' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - url: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.1-py310hd41b1e2_1.conda + libgfortran: 14.1.0 + url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.1.0-h69a702a_1.conda hash: - md5: 6a38f65d330b74495ad6990280486049 - sha256: 16f44e7e47f7cf9c3c02d760beb9179698510740e0eb1927ade3d8fb69aa1a0d + md5: 16cec94c5992d7f42ae3f9fa8b25df8d + sha256: a2dc35cb7f87bb5beebf102d4085574c6a740e1df58e743185d4434cc5e4e0ae category: main optional: false -- name: fenics-basix - version: 0.6.0 +- name: libgfortran5 + version: 14.1.0 manager: conda platform: linux-64 dependencies: - fenics-libbasix: 0.6.0 - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - numpy: '' - pybind11-abi: '4' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - url: https://conda.anaconda.org/conda-forge/linux-64/fenics-basix-0.6.0-py310heada3b3_5.conda + libgcc: '>=14.1.0' + url: 
https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.1.0-hc5f4f2c_1.conda hash: - md5: 500308aa1c64da979a417778a1b3039f - sha256: 2f2d5b20709650e99046d8939a8e3ceba5bee1c039f0228d07504c9fc931aba6 + md5: 10a0cef64b784d6ab6da50ebca4e984d + sha256: c40d7db760296bf9c776de12597d2f379f30e890b9ae70c1de962ff2aa1999f6 category: main optional: false -- name: fenics-ufl - version: 2023.1.1 +- name: libgomp + version: 14.1.0 manager: conda platform: linux-64 dependencies: - numpy: '' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/fenics-ufl-2023.1.1-pyhd8ed1ab_1.conda + _libgcc_mutex: '0.1' + url: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_1.conda hash: - md5: 07040acaab0ab2cc2d173cc762983d58 - sha256: 341272328e654a4c55328c6e8db422c89816e925bfd91f3aa86b2d63c8801e11 + md5: 23c255b008c4f2ae008f81edcabaca89 + sha256: c96724c8ae4ee61af7674c5d9e5a3fbcf6cd887a40ad5a52c99aa36f1d4f9680 category: main optional: false -- name: gst-plugins-base - version: 1.22.6 +- name: liblapack + version: 3.9.0 manager: conda platform: linux-64 dependencies: - __glibc: '>=2.17,<3.0.a0' - alsa-lib: '>=1.2.10,<1.2.11.0a0' - gettext: '>=0.21.1,<1.0a0' - gstreamer: 1.22.6 - libexpat: '>=2.5.0,<3.0a0' - libgcc-ng: '>=12' - libglib: '>=2.78.0,<3.0a0' - libogg: '>=1.3.4,<1.4.0a0' - libopus: '>=1.3.1,<2.0a0' - libpng: '>=1.6.39,<1.7.0a0' - libstdcxx-ng: '>=12' - libvorbis: '>=1.3.7,<1.4.0a0' - libxcb: '>=1.15,<1.16.0a0' - libzlib: '>=1.2.13,<1.3.0a0' - xorg-libx11: '>=1.8.6,<2.0a0' - xorg-libxau: '>=1.0.11,<2.0a0' - xorg-libxext: '>=1.3.4,<2.0a0' - xorg-libxrender: '>=0.9.11,<0.10.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.6-h8e1006c_2.conda + libblas: 3.9.0 + url: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-24_linux64_openblas.conda hash: - md5: 3d8e98279bad55287f2ef9047996f33c - sha256: 07e71ef8ad4d1516695132ed142ef6bc6393243fee54f950aa0944561f2f277f + md5: fd540578678aefe025705f4b58b36b2e + sha256: a15da20c3c0fb5f356e5b4e2f1e87b0da11b9a46805a7f2609bf30f23453831a category: main optional: false -- name: h5py - version: 3.10.0 +- name: libnsl + version: 2.0.1 manager: conda platform: linux-64 dependencies: - cached-property: '' - hdf5: '>=1.14.2,<1.14.3.0a0' libgcc-ng: '>=12' - numpy: '>=1.22.4,<2.0a0' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - url: https://conda.anaconda.org/conda-forge/linux-64/h5py-3.10.0-nompi_py310ha2ad45a_100.conda - hash: - md5: dab51c266c6be866e0a5d101bfb0cd58 - sha256: f1af30a4d13b65667f3878b072187f61a80299ea44b47f996612d955bff58b0a - category: main - optional: false -- name: jsonschema - version: 4.19.1 - manager: conda - platform: linux-64 - dependencies: - attrs: '>=22.2.0' - importlib_resources: '>=1.4.0' - jsonschema-specifications: '>=2023.03.6' - pkgutil-resolve-name: '>=1.3.10' - python: '>=3.8' - referencing: '>=0.28.4' - rpds-py: '>=0.7.1' - url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.19.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda hash: - md5: 78aff5d2af74e6537c1ca73017f01f4f - sha256: b4e50e1d53b984a467e79b7ba69cc408d14e3a2002cad4eaf7798e20268cff2d + md5: 30fd6e37fe21f86f4bd26d6ee73eeec7 + sha256: 26d77a3bb4dceeedc2a41bd688564fe71bf2d149fdcf117049970bc02ff1add6 category: main optional: false -- name: libass - version: 0.17.1 +- name: libopenblas + version: 0.3.27 manager: conda platform: linux-64 dependencies: - fontconfig: '>=2.14.2,<3.0a0' - fonts-conda-ecosystem: '' - freetype: 
'>=2.12.1,<3.0a0' - fribidi: '>=1.0.10,<2.0a0' - harfbuzz: '>=8.1.1,<9.0a0' - libexpat: '>=2.5.0,<3.0a0' libgcc-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libass-0.17.1-h8fe9dca_1.conda - hash: - md5: c306fd9cc90c0585171167d09135a827 - sha256: 1bc3e44239a11613627488b7a9b6c021ec6b52c5925abd666832db0cb2a59f05 - category: main - optional: false -- name: mumps-mpi - version: 5.2.1 - manager: conda - platform: linux-64 - dependencies: - libblas: '>=3.8.0,<4.0a0' - libgcc-ng: '>=10.3.0' libgfortran-ng: '' - libgfortran5: '>=10.3.0' - liblapack: '>=3.8.0,<4.0a0' - metis: '>=5.1.0,<5.1.1.0a0' - mumps-include: 5.2.1 - openmpi: '>=4.1.2,<5.0a0' - parmetis: '>=4.0.3,<4.1.0a0' - ptscotch: '>=6.0.9,<6.0.10.0a0' - scalapack: '>=2.2.0,<2.3.0a0' - scotch: '>=6.0.9,<6.0.10.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/mumps-mpi-5.2.1-hfb3545b_11.tar.bz2 + libgfortran5: '>=12.3.0' + url: https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.27-pthreads_hac2b453_1.conda hash: - md5: 8cf8da26a2d1e4a6f07fc6597fd42de6 - sha256: fee9b22355697ffa0ed288d1cb01d70041563c6c4c9d8822091f9654c9be2029 + md5: ae05ece66d3924ac3d48b4aa3fa96cec + sha256: 714cb82d7c4620ea2635a92d3df263ab841676c9b183d0c01992767bb2451c39 category: main optional: false -- name: scipy - version: 1.11.3 +- name: libsqlite + version: 3.46.1 manager: conda platform: linux-64 dependencies: - libblas: '>=3.9.0,<4.0a0' - libcblas: '>=3.9.0,<4.0a0' - libgcc-ng: '>=12' - libgfortran-ng: '' - libgfortran5: '>=12.3.0' - liblapack: '>=3.9.0,<4.0a0' - libstdcxx-ng: '>=12' - numpy: '>=1.22.4,<2.0a0' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - url: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.3-py310hb13e2d6_1.conda + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + libzlib: '>=1.3.1,<2.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.1-hadc24fc_0.conda hash: - md5: 4260b359d8fbeab4f789a8b0f968079f - sha256: bb8cdaf0869979ef58b3c10491f235c0fabf0b091e591361d25a4ffd47d6aded + md5: 36f79405ab16bf271edb55b213836dac + sha256: 9851c049abafed3ee329d6c7c2033407e2fc269d33a75c071110ab52300002b0 category: main optional: false -- name: fenics-basix-pybind11-abi - version: 0.4.12 +- name: libstdcxx + version: 14.1.0 manager: conda platform: linux-64 dependencies: - fenics-basix: 0.6.0 - url: https://conda.anaconda.org/conda-forge/linux-64/fenics-basix-pybind11-abi-0.4.12-heada3b3_5.conda + libgcc: 14.1.0 + url: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.1.0-hc0a3c3a_1.conda hash: - md5: 07c81f355e2248916d6244e3b65df73b - sha256: c713144b13d81825ef50a79422764fc282568ec00841affff6aad328b25661b7 + md5: 9dbb9699ea467983ba8a4ba89b08b066 + sha256: 44decb3d23abacf1c6dd59f3c152a7101b7ca565b4ef8872804ceaedcc53a9cd category: main optional: false -- name: fenics-ffcx - version: 0.6.0 +- name: libuuid + version: 2.38.1 manager: conda platform: linux-64 dependencies: - cffi: '' - fenics-basix: 0.6.* - fenics-ufl: 2023.1.* - numpy: '' - python: '>=3.7' - setuptools: '' - url: https://conda.anaconda.org/conda-forge/noarch/fenics-ffcx-0.6.0-pyh56297ac_0.conda + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda hash: - md5: cf36f595191568ba5aac38dbeeb6298c - sha256: facf23ffc9713262992a0f410d67ce9e3a2fa870662211acfe995b579b40569b + md5: 40b61aab5c7ba9ff276c41cfffe6b80b + sha256: 787eb542f055a2b3de553614b25f09eefb0a0931b0c87dbcce6efdfd92f04f18 category: main optional: false -- name: ffmpeg - 
version: 6.0.0 +- name: libxcrypt + version: 4.4.36 manager: conda platform: linux-64 dependencies: - aom: '>=3.6.1,<3.7.0a0' - bzip2: '>=1.0.8,<2.0a0' - dav1d: '>=1.2.1,<1.2.2.0a0' - fontconfig: '>=2.14.2,<3.0a0' - fonts-conda-ecosystem: '' - freetype: '>=2.12.1,<3.0a0' - gmp: '>=6.2.1,<7.0a0' - gnutls: '>=3.7.8,<3.8.0a0' - lame: '>=3.100,<3.101.0a0' - libass: '>=0.17.1,<0.17.2.0a0' libgcc-ng: '>=12' - libopus: '>=1.3.1,<2.0a0' - libstdcxx-ng: '>=12' - libva: '>=2.20.0,<3.0a0' - libvpx: '>=1.13.0,<1.14.0a0' - libxml2: '>=2.11.5,<2.12.0a0' - libzlib: '>=1.2.13,<1.3.0a0' - openh264: '>=2.3.1,<2.3.2.0a0' - svt-av1: '>=1.7.0,<1.7.1.0a0' - x264: '>=1!164.3095,<1!165' - x265: '>=3.5,<3.6.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-6.0.0-gpl_h334edf3_105.conda + url: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda hash: - md5: d47c3e10d2ca5fc07107d4ac640603da - sha256: f1f9070190bc189b9ec9034e9d9adbbb530cd25b571c763b33585195c0e13813 + md5: 5aa797f8787fe7a17d1b0821485b5adc + sha256: 6ae68e0b86423ef188196fff6207ed0c8195dd84273cb5623b85aa08033a410c category: main optional: false -- name: matplotlib-base - version: 3.8.0 +- name: libzlib + version: 1.3.1 manager: conda platform: linux-64 dependencies: - certifi: '>=2020.06.20' - contourpy: '>=1.0.1' - cycler: '>=0.10' - fonttools: '>=4.22.0' - freetype: '>=2.12.1,<3.0a0' - kiwisolver: '>=1.0.1' libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - numpy: '>=1.22.4,<2.0a0' - packaging: '>=20.0' - pillow: '>=6.2.0' - pyparsing: '>=2.3.1' - python: '>=3.10,<3.11.0a0' - python-dateutil: '>=2.7' - python_abi: 3.10.* - tk: '>=8.6.13,<8.7.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.0-py310h62c0568_2.conda + url: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda hash: - md5: 5c0d101ef8fc542778aa80795a759d08 - sha256: 220052334fb2b01b5a487ddf6953c1c7713b401cc0faa0898401422799cdcec1 + md5: 57d7dc60e9325e3de37ff8dffd18e814 + sha256: adf6096f98b537a11ae3729eaa642b0811478f0ea0402ca67b5108fe2cb0010d category: main optional: false -- name: netcdf4 - version: 1.6.4 +- name: ncurses + version: '6.5' manager: conda platform: linux-64 dependencies: - certifi: '' - cftime: '' - hdf5: '>=1.14.2,<1.14.3.0a0' + __glibc: '>=2.17,<3.0.a0' libgcc-ng: '>=12' - libnetcdf: '>=4.9.2,<4.9.3.0a0' - libzlib: '>=1.2.13,<1.3.0a0' - numpy: '>=1.22.4,<2.0a0' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - setuptools: '' - url: https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py310hba70d50_103.conda + url: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda hash: - md5: 0850d2a119d51601b20c406a4909af4d - sha256: 43dd515bc3ba60d9c7ecf21639803c56af967901086f38c919ff7c67271474f6 + md5: 70caf8bb6cf39a0b6b7efc885f51c0fe + sha256: 6a1d5d8634c1a07913f1c525db6455918cbc589d745fac46d9d6e30340c8731a category: main optional: false -- name: petsc - version: 3.19.6 +- name: numpy + version: 2.1.1 manager: conda platform: linux-64 dependencies: - fftw: '>=3.3.10,<4.0a0' - hdf5: '>=1.14.2,<1.14.3.0a0' - hypre: '>=2.28.0,<2.28.1.0a0' + __glibc: '>=2.17,<3.0.a0' libblas: '>=3.9.0,<4.0a0' libcblas: '>=3.9.0,<4.0a0' - libgcc-ng: '>=12' - libgfortran-ng: '' - libgfortran5: '>=12.3.0' + libgcc: '>=13' liblapack: '>=3.9.0,<4.0a0' - libstdcxx-ng: '>=12' - metis: '>=5.1.0,<5.2.0a0' - mumps-mpi: '>=5.2.1,<5.2.2.0a0' - openmpi: '>=4.1.5,<5.0a0' - parmetis: '>=4.0.3,<4.1.0a0' - ptscotch: '>=6.0.9,<6.0.10.0a0' - scalapack: '>=2.2.0,<2.3.0a0' - scotch: 
'>=6.0.9,<6.0.10.0a0' - suitesparse: '>=5.10.1,<5.11.0a0' - superlu: '' - superlu_dist: '>=7.1.1,<8.0a0' - yaml: '>=0.2.5,<0.3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/petsc-3.19.6-real_hda1ae68_100.conda + libstdcxx: '>=13' + python: '>=3.12,<3.13.0a0' + python_abi: 3.12.* + url: https://conda.anaconda.org/conda-forge/linux-64/numpy-2.1.1-py312h58c1407_0.conda hash: - md5: b3f27fbfaea8e00d1abb10b0f2401ce8 - sha256: d39fe136656cc6f554b609ad1bf5e0565c2b487aeb7f9b27d71d24a69956950c + md5: 839596d1c1c41f6fc01042e12cb7500c + sha256: 5d7d73f46d929dba57d96e6ef68506a490c89a2599514575c3e33b031e62b244 category: main optional: false -- name: qt-main - version: 5.15.8 +- name: openssl + version: 3.3.2 manager: conda platform: linux-64 dependencies: __glibc: '>=2.17,<3.0.a0' - alsa-lib: '>=1.2.10,<1.2.11.0a0' - dbus: '>=1.13.6,<2.0a0' - fontconfig: '>=2.14.2,<3.0a0' - fonts-conda-ecosystem: '' - freetype: '>=2.12.1,<3.0a0' - gst-plugins-base: '>=1.22.5,<1.23.0a0' - gstreamer: '>=1.22.5,<1.23.0a0' - harfbuzz: '>=8.2.0,<9.0a0' - icu: '>=73.2,<74.0a0' - krb5: '>=1.21.2,<1.22.0a0' - libclang: '>=15.0.7,<16.0a0' - libclang13: '>=15.0.7' - libcups: '>=2.3.3,<2.4.0a0' - libevent: '>=2.1.12,<2.1.13.0a0' - libexpat: '>=2.5.0,<3.0a0' - libgcc-ng: '>=12' - libglib: '>=2.78.0,<3.0a0' - libjpeg-turbo: '>=2.1.5.1,<3.0a0' - libpng: '>=1.6.39,<1.7.0a0' - libpq: '>=15.4,<16.0a0' - libsqlite: '>=3.43.0,<4.0a0' - libstdcxx-ng: '>=12' - libxcb: '>=1.15,<1.16.0a0' - libxkbcommon: '>=1.5.0,<2.0a0' - libxml2: '>=2.11.5,<2.12.0a0' - libzlib: '>=1.2.13,<1.3.0a0' - mysql-libs: '>=8.0.33,<8.1.0a0' - nspr: '>=4.35,<5.0a0' - nss: '>=3.92,<4.0a0' - openssl: '>=3.1.2,<4.0a0' - pulseaudio-client: '>=16.1,<16.2.0a0' - xcb-util: '>=0.4.0,<0.5.0a0' - xcb-util-image: '>=0.4.0,<0.5.0a0' - xcb-util-keysyms: '>=0.4.0,<0.5.0a0' - xcb-util-renderutil: '>=0.3.9,<0.4.0a0' - xcb-util-wm: '>=0.4.1,<0.5.0a0' - xorg-libice: '>=1.1.1,<2.0a0' - xorg-libsm: '>=1.2.4,<2.0a0' - xorg-libx11: '>=1.8.6,<2.0a0' - xorg-libxext: '>=1.3.4,<2.0a0' - xorg-xf86vidmodeproto: '' - zstd: '>=1.5.5,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-hc47bfe8_16.conda + ca-certificates: '' + libgcc: '>=13' + url: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.2-hb9d3cd8_0.conda hash: - md5: a8dd2dfcd570e3965c73be6c5e03e74f - sha256: 18dc29e725b620ec857368b40f07c41fd360b6c4071f83b67112eabfc087e8f1 + md5: 4d638782050ab6faa27275bed57e9b4e + sha256: cee91036686419f6dd6086902acf7142b4916e1c4ba042e9ca23e151da012b6d category: main optional: false -- name: wslink - version: 1.12.3 +- name: pip + version: '24.2' manager: conda platform: linux-64 dependencies: - aiohttp: <4 - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/wslink-1.12.3-pyhd8ed1ab_0.conda + python: '>=3.8,<3.13.0a0' + setuptools: '' + wheel: '' + url: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda hash: - md5: 9c9a548737e8238d23d88a7ff33fd253 - sha256: 638a5cfd213217ef59af935d6c12f60a848adfa1d0cd22471f4fa6bc113b419c + md5: 6c78fbb8ddfd64bcb55b5cbafd2d2c43 + sha256: d820e5358bcb117fa6286e55d4550c60b0332443df62121df839eab2d11c890b category: main optional: false -- name: meshio - version: 5.3.4 +- name: pydantic + version: 2.9.2 manager: conda platform: linux-64 dependencies: - h5py: '' - importlib_metadata: '' - netcdf4: '' - numpy: '' + annotated-types: '>=0.6.0' + pydantic-core: 2.23.4 python: '>=3.7' - rich: '' - url: https://conda.anaconda.org/conda-forge/noarch/meshio-5.3.4-pyhd8ed1ab_0.tar.bz2 - hash: 
- md5: d06b23cad8f0645461da747eafb1e8c6 - sha256: ca3f103cbf8f3f999771cc178b4e8ae5ea458a67a79fdb6bbc7b364bd232c86e - category: main - optional: false -- name: petsc4py - version: 3.19.6 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libgfortran-ng: '' - libgfortran5: '>=12.3.0' - numpy: '>=1.22.4,<2.0a0' - openmpi: '>=4.1.6,<5.0a0' - petsc: '>=3.19.6,<3.20.0a0' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - url: https://conda.anaconda.org/conda-forge/linux-64/petsc4py-3.19.6-real_hd44ebbc_100.conda - hash: - md5: 3907e4400bae4b4447e3dfdfaffad02c - sha256: 1696dcd78085e7c2a73be6a8d6b1aa7d6c784469c995dd3d9c130d6cc3dbb823 - category: main - optional: false -- name: pyqt - version: 5.15.9 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - pyqt5-sip: 12.12.2 - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - qt-main: '>=5.15.8,<5.16.0a0' - sip: '>=6.7.11,<6.8.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py310h04931ad_5.conda + typing-extensions: '>=4.6.1' + url: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.2-pyhd8ed1ab_0.conda hash: - md5: f4fe7a6e3d7c78c9de048ea9dda21690 - sha256: 92fe1c9eda6be7879ba798066016c1065047cc13d730105f5109835cbfeae8f1 + md5: 1eb533bb8eb2199e3fef3e4aa147319f + sha256: 1b7b0dc9f6af4da156bf22b0263be70829364a08145c696d3670facff2f6441a category: main optional: false -- name: slepc - version: 3.19.2 +- name: pydantic-core + version: 2.23.4 manager: conda platform: linux-64 dependencies: - libblas: '>=3.9.0,<4.0a0' - libcblas: '>=3.9.0,<4.0a0' - libgcc-ng: '>=12' - libgfortran-ng: '' - libgfortran5: '>=12.3.0' - liblapack: '>=3.9.0,<4.0a0' - libstdcxx-ng: '>=12' - openmpi: '>=4.1.5,<5.0a0' - petsc: '>=3.19.5,<3.20.0a0' - suitesparse: '>=5.10.1,<5.11.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/slepc-3.19.2-real_h3d00a72_100.conda + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + python: '>=3.12,<3.13.0a0' + python_abi: 3.12.* + typing-extensions: '>=4.6.0,!=4.7.0' + url: https://conda.anaconda.org/conda-forge/linux-64/pydantic-core-2.23.4-py312h12e396e_0.conda hash: - md5: 3e8e7a314aff0311a40165f2c70fd0e5 - sha256: 416a05c441c97ecf1ed04b9e6c6a5ac57ed99127896d4c29ec29d9c4fdea8576 + md5: 0845ab52d4ea209049129a6a91bc74ba + sha256: 365fde689865087b2a9da636f36678bd59617b324ce7a538b4806e90602b20f1 category: main optional: false -- name: vtk-base - version: 9.2.6 +- name: python + version: 3.12.6 manager: conda platform: linux-64 dependencies: - double-conversion: '>=3.3.0,<3.4.0a0' - eigen: '' - expat: '' - freetype: '>=2.12.1,<3.0a0' - gl2ps: '>=1.4.2,<1.4.3.0a0' - glew: '>=2.1.0,<2.2.0a0' - hdf5: '>=1.14.2,<1.14.3.0a0' - jsoncpp: '>=1.9.5,<1.9.6.0a0' - libexpat: '>=2.5.0,<3.0a0' - libgcc-ng: '>=12' - libjpeg-turbo: '>=2.1.5.1,<3.0a0' - libnetcdf: '>=4.9.2,<4.9.3.0a0' - libogg: '>=1.3.4,<1.4.0a0' - libpng: '>=1.6.39,<1.7.0a0' - libsqlite: '>=3.43.0,<4.0a0' - libstdcxx-ng: '>=12' - libtheora: '>=1.1.1,<1.2.0a0' - libtiff: '>=4.6.0,<4.7.0a0' + __glibc: '>=2.17,<3.0.a0' + bzip2: '>=1.0.8,<2.0a0' + ld_impl_linux-64: '>=2.36.1' + libexpat: '>=2.6.3,<3.0a0' + libffi: '>=3.4,<4.0a0' + libgcc: '>=13' + libnsl: '>=2.0.1,<2.1.0a0' + libsqlite: '>=3.46.1,<4.0a0' libuuid: '>=2.38.1,<3.0a0' - libxcb: '>=1.15,<1.16.0a0' - libxml2: '>=2.11.5,<2.12.0a0' - libzlib: '>=1.2.13,<1.3.0a0' - loguru: '' - lz4-c: '>=1.9.3,<1.10.0a0' - nlohmann_json: '' - numpy: '' - proj: '>=9.3.0,<9.3.1.0a0' - pugixml: '>=1.13,<1.14.0a0' - python: '>=3.10,<3.11.0a0' - python_abi: 
3.10.* - qt-main: '>=5.15.8,<5.16.0a0' - sqlite: '' - tbb: '>=2021.10.0' - tbb-devel: '' + libxcrypt: '>=4.4.36' + libzlib: '>=1.3.1,<2.0a0' + ncurses: '>=6.5,<7.0a0' + openssl: '>=3.3.2,<4.0a0' + readline: '>=8.2,<9.0a0' tk: '>=8.6.13,<8.7.0a0' - utfcpp: '' - wslink: '' - xorg-libice: '>=1.1.1,<2.0a0' - xorg-libsm: '>=1.2.4,<2.0a0' - xorg-libx11: '>=1.8.6,<2.0a0' - xorg-libxau: '>=1.0.11,<2.0a0' - xorg-libxext: '>=1.3.4,<2.0a0' - xorg-libxt: '>=1.3.0,<2.0a0' - zlib: '' - url: https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.2.6-qt_py310h1234567_216.conda - hash: - md5: a82dff490dfa22dfd106e5165e2a76f9 - sha256: ff6b260e12ed9a15c735d5048ab1ef0f4b5b62a5888a27b3c74465cb5158ee1d - category: main - optional: false -- name: fenics-libdolfinx - version: 0.6.0 - manager: conda - platform: linux-64 - dependencies: - fenics-libbasix: '>=0.6.0,<0.6.1.0a0' - fenics-ufcx: '>=0.6.0,<0.6.1.0a0' - hdf5: '>=1.14.2,<1.14.3.0a0' - kahip: '>=3.15,<3.16.0a0' - libadios2: '>=2.9.1,<2.9.2.0a0' - libboost: '>=1.82.0,<1.83.0a0' - libboost-devel: '' - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - openmpi: '>=4.1.5,<5.0a0' - parmetis: '>=4.0.3,<4.1.0a0' - petsc: '>=3.19.6,<3.20.0a0' - ptscotch: '>=6.0.9,<6.0.10.0a0' - pugixml: '>=1.13,<1.14.0a0' - slepc: '>=3.19.2,<3.20.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/fenics-libdolfinx-0.6.0-hd8a4704_109.conda - hash: - md5: a4269c1bd85b6ecd8db71ba80abd3da0 - sha256: eeebe1fa79f9653a17ac192f00bca2f574cee6f8ac0ce2ea6eba9d80b8a738a8 - category: main - optional: false -- name: matplotlib - version: 3.8.0 - manager: conda - platform: linux-64 - dependencies: - matplotlib-base: '>=3.8.0,<3.8.1.0a0' - pyqt: '>=5.10' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - tornado: '>=5' - url: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.8.0-py310hff52083_2.conda - hash: - md5: cda26b4d722d7319ce66df50332ff09b - sha256: 7262fdcd2974ab6aa864524c48703a81dc1c7891a5180da3cc3a72abafd2fc9b - category: main - optional: false -- name: slepc4py - version: 3.19.2 - manager: conda - platform: linux-64 - dependencies: - libgcc-ng: '>=12' - numpy: '>=1.22.4,<2.0a0' - openmpi: '>=4.1.5,<5.0a0' - petsc: '>=3.19.5,<3.20.0a0' - petsc4py: 3.19.* - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - slepc: '>=3.19.2,<3.20.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/slepc4py-3.19.2-real_hf1ee65d_101.conda + tzdata: '' + xz: '>=5.2.6,<6.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/python-3.12.6-hc5c86c4_1_cpython.conda hash: - md5: c76368df8c4d20c30c4b6749641fdc42 - sha256: 23bca2db1bc84e1d4842f56baaadc3bde795d9489190e0ec0565bab7a7bf39e8 + md5: 00836baacdca254f28c54d2543e97514 + sha256: abae02ac635147181e6b7d4b3c8fde89d298d354ed23576853b86bc1384c50f6 category: main optional: false -- name: vtk-io-ffmpeg - version: 9.2.6 +- name: python_abi + version: '3.12' manager: conda platform: linux-64 - dependencies: - ffmpeg: '>=6.0.0,<7.0a0' - vtk-base: 9.2.6 - url: https://conda.anaconda.org/conda-forge/linux-64/vtk-io-ffmpeg-9.2.6-qt_py310h1234567_216.conda + dependencies: {} + url: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-5_cp312.conda hash: - md5: d9a5e5dc290dd038bf958f6a48772530 - sha256: 00d47cb873f3de378e1b142d186d728e281f9d3cf87508125dadca9298f089f8 + md5: 0424ae29b104430108f5218a66db7260 + sha256: d10e93d759931ffb6372b45d65ff34d95c6000c61a07e298d162a3bc2accebb0 category: main optional: false -- name: fenics-dolfinx - version: 0.6.0 +- name: readline + version: '8.2' manager: conda platform: linux-64 
dependencies: - cffi: '' - fenics-basix: 0.6.* - fenics-basix-pybind11-abi: 0.4.12 - fenics-ffcx: 0.6.* - fenics-libdolfinx: 0.6.0 - fenics-ufl: 2023.1.* - gxx_linux-64: 12.* - hdf5: '>=1.14.2,<1.14.3.0a0' - kahip-python: '' libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - mpi4py: '' - numpy: '' - openmpi: '>=4.1.5,<5.0a0' - petsc: '>=3.19.6,<3.20.0a0' - petsc4py: '' - pkg-config: '' - pybind11-abi: '4' - python: '>=3.10,<3.11.0a0' - python_abi: 3.10.* - slepc: '>=3.19.2,<3.20.0a0' - slepc4py: '' - url: https://conda.anaconda.org/conda-forge/linux-64/fenics-dolfinx-0.6.0-py310hf26a831_109.conda - hash: - md5: 4956a7cc80be6c26f341aaa39d2bc99a - sha256: 2f2e1dca7e1b7773def590ac451f55de00715ad660eacc2c0ad63d95c3fd319a - category: main - optional: false -- name: vtk - version: 9.2.6 - manager: conda - platform: linux-64 - dependencies: - vtk-base: 9.2.6 - vtk-io-ffmpeg: 9.2.6 - url: https://conda.anaconda.org/conda-forge/linux-64/vtk-9.2.6-qt_py310h1234567_216.conda + ncurses: '>=6.3,<7.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda hash: - md5: 28c1d6fdf662ae0b1eb45945da7b9a81 - sha256: 7bac072a71e4a27af1d5e9c8d2caccaccc0138547738b9088dc34291934163de + md5: 47d31b792659ce70f470b5c82fdfb7a4 + sha256: 5435cf39d039387fbdc977b0a762357ea909a7694d9528ab40f005e9208744d7 category: main optional: false -- name: occt - version: 7.7.2 +- name: setuptools + version: 75.1.0 manager: conda platform: linux-64 dependencies: - fontconfig: '>=2.14.2,<3.0a0' - fonts-conda-ecosystem: '' - freeimage: '>=3.18.0,<3.19.0a0' - freetype: '>=2.12.1,<3.0a0' - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - rapidjson: '' - vtk: '>=9.2.6,<9.2.7.0a0' - xorg-libxt: '>=1.3.0,<2.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/occt-7.7.2-all_h4c9f3c6_201.conda + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.1.0-pyhd8ed1ab_0.conda hash: - md5: c3433e65219e92395cba0cc048a45746 - sha256: 6d5d4358b3022387c8ea43345d346594e0576d8ad1d8976f938801b4d355114c + md5: d5cd48392c67fb6849ba459c2c2b671f + sha256: 6725235722095c547edd24275053c615158d6163f396550840aebd6e209e4738 category: main optional: false -- name: gmsh - version: 4.11.1 +- name: tk + version: 8.6.13 manager: conda platform: linux-64 dependencies: - cairo: '>=1.16.0,<2.0a0' - fltk: '>=1.3.8,<1.4.0a0' - gmp: '>=6.2.1,<7.0a0' - libblas: '>=3.9.0,<4.0a0' libgcc-ng: '>=12' - libglu: '' - libjpeg-turbo: '>=2.1.5.1,<3.0a0' - liblapack: '>=3.9.0,<4.0a0' - libpng: '>=1.6.39,<1.7.0a0' - libstdcxx-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - occt: '>=7.7.2,<7.8.0a0' - xorg-libx11: '>=1.8.6,<2.0a0' - xorg-libxext: '>=1.3.4,<2.0a0' - xorg-libxfixes: '' - xorg-libxmu: '' - xorg-libxrender: '>=0.9.11,<0.10.0a0' - zlib: '' - url: https://conda.anaconda.org/conda-forge/linux-64/gmsh-4.11.1-h22e7e47_4.conda + libzlib: '>=1.2.13,<2.0.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda hash: - md5: 1d1b8525e1f0a535f77892e03c0a3917 - sha256: 27f42074ad8e649f86d0ffca1d4ce8b819cd07df2936ae5fe84881054c217fa0 + md5: d453b98d9c83e71da0741bb0ff4d76bc + sha256: e0569c9caa68bf476bead1bed3d79650bb080b532c64a4af7d8ca286c08dea4e category: main optional: false -- name: python-gmsh - version: 4.11.1 +- name: typing-extensions + version: 4.12.2 manager: conda platform: linux-64 dependencies: - gmsh: '>=4.11.1,<4.11.2.0a0' - numpy: '' - python: '' - url: https://conda.anaconda.org/conda-forge/noarch/python-gmsh-4.11.1-h57928b3_4.conda + typing_extensions: 4.12.2 + url: 
https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda hash: - md5: d6cd94a9b194e06eb59b1ce6e5464e24 - sha256: cd832f13039fb46b319232b4fea305c9413d9a46fe9fb3127b7f053134be614b + md5: 52d648bd608f5737b123f510bb5514b5 + sha256: d3b9a8ed6da7c9f9553c5fd8a4fca9c3e0ab712fa5f497859f82337d67533b73 category: main optional: false -- name: myst-parser - version: 2.0.0 +- name: typing_extensions + version: 4.12.2 manager: conda platform: linux-64 dependencies: - docutils: '>=0.16,<0.21' - jinja2: '' - markdown-it-py: '>=3.0.0,<4.0.0' - mdit-py-plugins: '>=0.4,<1' python: '>=3.8' - pyyaml: '' - sphinx: '>=6,<8' - url: https://conda.anaconda.org/conda-forge/noarch/myst-parser-2.0.0-pyhd8ed1ab_0.conda - hash: - md5: 70699181909e468875f12076e1b0a8a9 - sha256: 59cdc52d9875f623a4df82896d80f304e436138f8410cbef969a7e4452c6bab7 - category: main - optional: false -- name: sphinx-gallery - version: 0.14.0 - manager: conda - platform: linux-64 - dependencies: - matplotlib-base: '' - pillow: '' - python: '>=3' - sphinx: '>=1.8.3' - url: https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.14.0-pyhd8ed1ab_0.conda - hash: - md5: b3788794f88c9512393032e448428261 - sha256: d9421604023b36e336496bb03461414cd07ced3514ed15c1d9f598178fb9d86e - category: main - optional: false -- name: sphinxcontrib-applehelp - version: 1.0.7 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.9' - sphinx: '>=5' - url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.7-pyhd8ed1ab_0.conda - hash: - md5: aebfabcb60c33a89c1f9290cab49bc93 - sha256: 67e2b386c7b3c858ead88fa71fe4fa5eb1f4f59d7994d167b3910a744db392d3 - category: main - optional: false -- name: sphinxcontrib-devhelp - version: 1.0.5 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.9' - sphinx: '>=5' - url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.5-pyhd8ed1ab_0.conda - hash: - md5: ebf08f5184d8eaa486697bc060031953 - sha256: 770e13ebfef321426c09ec51d95c57755512db160518b2922a4337546ee51672 - category: main - optional: false -- name: sphinxcontrib-htmlhelp - version: 2.0.4 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.9' - sphinx: '>=5' - url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.4-pyhd8ed1ab_0.conda - hash: - md5: a9a89000dfd19656ad004b937eeb6828 - sha256: 5f09cd4a08a6c194c11999871a8c7cedc2cd7edd9ff7ceb6f0667b6698be4cc5 - category: main - optional: false -- name: sphinxcontrib-jquery - version: '4.1' - manager: conda - platform: linux-64 - dependencies: - python: '>=2.7' - sphinx: '>=1.8' - url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jquery-4.1-pyhd8ed1ab_0.conda - hash: - md5: 914897066d5873acfb13e75705276ad1 - sha256: 2e5f16a2d58f9a31443ffbb8ce3852cfccf533a6349045828cd2e994ef0679ca - category: main - optional: false -- name: sphinx_rtd_theme - version: 1.3.0 - manager: conda - platform: linux-64 - dependencies: - docutils: <0.19 - python: '>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*' - sphinx: '>=1.6,<8' - sphinxcontrib-jquery: '>=4,<5' - url: https://conda.anaconda.org/conda-forge/noarch/sphinx_rtd_theme-1.3.0-pyha770c72_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda hash: - md5: a615c369167e508293d8409973b34863 - sha256: 1288aac6167e320b576d89855262f05b1903e446c3dfc92cc67b12b39fb62502 + md5: ebe6952715e1d5eb567eeebf25250fa7 + sha256: 0fce54f8ec3e59f5ef3bb7641863be4e1bf1279623e5af3d3fa726e8f7628ddb 
category: main optional: false -- name: sphinxcontrib-qthelp - version: 1.0.6 +- name: tzdata + version: 2024a manager: conda platform: linux-64 - dependencies: - python: '>=3.9' - sphinx: '>=5' - url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.6-pyhd8ed1ab_0.conda + dependencies: {} + url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda hash: - md5: cf5c9649272c677a964a7313279e3a9b - sha256: 9ba5cea9cbab64106e8b5a9b19add855dcb52b8fbb1674398c715bccdbc04471 + md5: 8bfdead4e0fff0383ae4c9c50d0531bd + sha256: 7d21c95f61319dba9209ca17d1935e6128af4235a67ee4e57a00908a1450081e category: main optional: false -- name: sphinx - version: 7.2.6 +- name: wheel + version: 0.44.0 manager: conda platform: linux-64 dependencies: - alabaster: '>=0.7,<0.8' - babel: '>=2.9' - colorama: '>=0.4.5' - docutils: '>=0.18.1,<0.21' - imagesize: '>=1.3' - importlib-metadata: '>=4.8' - jinja2: '>=3.0' - packaging: '>=21.0' - pygments: '>=2.14' - python: '>=3.9' - requests: '>=2.25.0' - snowballstemmer: '>=2.0' - sphinxcontrib-applehelp: '' - sphinxcontrib-devhelp: '' - sphinxcontrib-htmlhelp: '>=2.0.0' - sphinxcontrib-jsmath: '' - sphinxcontrib-qthelp: '' - sphinxcontrib-serializinghtml: '>=1.1.9' - url: https://conda.anaconda.org/conda-forge/noarch/sphinx-7.2.6-pyhd8ed1ab_0.conda + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda hash: - md5: bbfd1120d1824d2d073bc65935f0e4c0 - sha256: 665d1fe6d20c6cc672ff20e6ebb405860f878b487d3d8d86a5952733fb7bbc42 + md5: d44e3b085abcaef02983c6305b84b584 + sha256: d828764736babb4322b8102094de38074dedfc71f5ff405c9dfee89191c14ebc category: main optional: false -- name: sphinxcontrib-serializinghtml - version: 1.1.9 +- name: xz + version: 5.2.6 manager: conda platform: linux-64 dependencies: - python: '>=3.9' - sphinx: '>=5' - url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.9-pyhd8ed1ab_0.conda + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 hash: - md5: 0612e497d7860728f2cda421ea2aec09 - sha256: c5710ae7bb7465f25a29cc845d9fb6ad0ea561972d796d379fcb48d801e96d6d + md5: 2161070d867d1b1204ea749c8eec4ef0 + sha256: 03a6d28ded42af8a347345f82f3eebdd6807a08526d47899a42d62d319609162 category: main optional: false diff --git a/environment.yml b/environment.yml index cf458a6..325cf79 100644 --- a/environment.yml +++ b/environment.yml @@ -4,29 +4,8 @@ channels: dependencies: - python>=3.10 # runtime - - fenics-dolfinx=0.6.0 - - mpi4py=3.1.4 - - scipy - - pint - - matplotlib - - python-gmsh=4.11.1 - - meshio=5.3.4 - - jsonschema - # tests - - pytest - - coverage - - toml - # docs - - sphinx - - sphinx-gallery - - myst-parser - - sphinx_rtd_theme - # environment - - conda-ecosystem-user-package-isolation - # formatting - - black - - isort - - pip + - pydantic + - numpy - pip: - -e . diff --git a/src/fenicsxconcrete/bcs.py b/src/fenicsxconcrete/bcs.py new file mode 100644 index 0000000..2bee60e --- /dev/null +++ b/src/fenicsxconcrete/bcs.py @@ -0,0 +1,36 @@ + +from pydantic import ConfigDict, RootModel +from pydantic.dataclasses import dataclass +from pydantic.types import conlist +from typing import Callable, NewType +import numpy as np + +Marker = NewType('Marker', int) + +@dataclass(config=dict(arbitrary_types_allowed=True)) +class DirichletBCDefinition: + """ + Definition of a time- and position-dependent Dirichlet boundary condition. + Note that functions cannot be serialized to JSON! 
+ """ + marker: Marker | list[float] | Callable[[np.ndarray], np.ndarray] + value: conlist(float, min_length=1, max_length=3) | Callable[[np.ndarray, float], np.ndarray] + subspace: int | None + variable: str + +@dataclass(config=dict(arbitrary_types_allowed=True)) +class NeumannBCDefinition: + """ + Definition of a time- and position-dependent Neumann boundary condition. + """ + marker: Marker + value: conlist(float, min_length=1, max_length=3) | Callable[[np.ndarray, float], np.ndarray] + +@dataclass(config=dict(arbitrary_types_allowed=True)) +class BodyForceDefinition: + value: conlist(float, min_length=1, max_length=3) + +@dataclass(config=dict(arbitrary_types_allowed=True)) +class InitialConditionDefinition: + value: list[float] + variable: str diff --git a/src/fenicsxconcrete/boundary_conditions/__init__.py b/src/fenicsxconcrete/boundary_conditions/__init__.py deleted file mode 100644 index f2e7f22..0000000 --- a/src/fenicsxconcrete/boundary_conditions/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from .bcs import BoundaryConditions, get_boundary_dofs -from .boundary import _show_marked, create_facet_tags, line_at, plane_at, point_at, to_floats, within_range diff --git a/src/fenicsxconcrete/boundary_conditions/bcs.py b/src/fenicsxconcrete/boundary_conditions/bcs.py deleted file mode 100644 index 4f58c4e..0000000 --- a/src/fenicsxconcrete/boundary_conditions/bcs.py +++ /dev/null @@ -1,217 +0,0 @@ -"""Easy definition of Dirichlet and Neumann BCs.""" - -from collections.abc import Callable - -import dolfinx -import numpy as np -import ufl - -from fenicsxconcrete.util import LogMixin - - -def get_boundary_dofs(V: dolfinx.fem.FunctionSpace, marker: Callable) -> np.ndarray: - """Returns dofs on the boundary specified by geometrical `marker`. - - Args: - V: function space - marker: marker function - - Returns: - dofs on the boundary specified by geometrical `marker` - """ - - domain = V.mesh - tdim = domain.topology.dim - fdim = tdim - 1 - entities = dolfinx.mesh.locate_entities_boundary(domain, fdim, marker) - dofs = dolfinx.fem.locate_dofs_topological(V, fdim, entities) - g = dolfinx.fem.Function(V) - bc = dolfinx.fem.dirichletbc(g, dofs) - dof_indices = bc.dof_indices()[0] - return dof_indices - - -# adapted version of MechanicsBCs by Thomas Titscher -class BoundaryConditions(LogMixin): - """Handles Dirichlet and Neumann boundary conditions. - - Attributes: - domain: The computational domain. - V: The finite element space. - """ - - def __init__( - self, - domain: dolfinx.mesh.Mesh, - space: dolfinx.fem.FunctionSpace, - facet_tags: np.ndarray | None = None, - ) -> None: - """Initializes the instance based on domain and FE space. - - It sets up lists to hold the Dirichlet and Neumann BCs - as well as the required `ufl` objects to define Neumann - BCs if `facet_tags` is not None. - - Args: - domain: The computational domain. - space: The finite element space. - facet_tags: The mesh tags defining boundaries. 
- """ - - self.domain = domain - self.V = space - - # create connectivity - tdim = domain.topology.dim - fdim = tdim - 1 - domain.topology.create_connectivity(fdim, tdim) - - # list of dirichlet boundary conditions - self._bcs = [] - - # handle facets and measure for neumann bcs - self._neumann_bcs = [] - self._facet_tags = facet_tags - self._ds = ufl.Measure("ds", domain=domain, subdomain_data=facet_tags) - self._v = ufl.TestFunction(space) - - def add_dirichlet_bc( - self, - value: ( - dolfinx.fem.Function | dolfinx.fem.Constant | dolfinx.fem.DirichletBCMetaClass | np.ndarray | Callable - ), - boundary: int | np.ndarray | Callable | None = None, - sub: int = None, - method: str = "topological", - entity_dim: int | None = None, - ) -> None: - """Adds a Dirichlet bc. - - Args: - value: Anything that *might* be used to define the Dirichlet function. - It can be a `Function`, a `Callable` which is then interpolated - or an already existing Dirichlet BC, or ... (see type hint). - boundary: The part of the boundary whose dofs should be constrained. - This can be a callable defining the boundary geometrically or - an array of entity tags or an integer marking the boundary if - `facet_tags` is not None. - sub: If `sub` is not None the subspace `V.sub(sub)` will be - constrained. - method: A hint which method should be used to locate the dofs. - Choices: 'topological' or 'geometrical'. - entity_dim: The dimension of the entities to be located - topologically. Note that `entity_dim` is required if `sub` - is not None and `method=geometrical`. - """ - if isinstance(value, dolfinx.fem.DirichletBCMetaClass): - self._bcs.append(value) - else: - assert method in ("topological", "geometrical") - V = self.V.sub(sub) if sub is not None else self.V - - # if sub is not None and method=="geometrical" - # dolfinx.fem.locate_dofs_geometrical(V, boundary) will raise a RuntimeError - # because dofs of a subspace cannot be tabulated - topological = method == "topological" or sub is not None - - if topological: - assert entity_dim is not None - - if isinstance(boundary, int): - try: - facets = self._facet_tags.find(boundary) - except AttributeError: - raise AttributeError("There are no facet tags defined!") - if facets.size < 1: - raise ValueError(f"Not able to find facets tagged with value {boundary=}.") - elif isinstance(boundary, np.ndarray): - facets = boundary - else: - facets = dolfinx.mesh.locate_entities_boundary(self.domain, entity_dim, boundary) - - dofs = dolfinx.fem.locate_dofs_topological(V, entity_dim, facets) - else: - dofs = dolfinx.fem.locate_dofs_geometrical(V, boundary) - - try: - bc = dolfinx.fem.dirichletbc(value, dofs, V) - except TypeError: - # value is Function and V cannot be passed - # TODO understand 4th constructor - # see dolfinx/fem/bcs.py line 127 - bc = dolfinx.fem.dirichletbc(value, dofs) - except AttributeError: - # value has no Attribute `dtype` - f = dolfinx.fem.Function(V) - f.interpolate(value) - bc = dolfinx.fem.dirichletbc(f, dofs) - - self._bcs.append(bc) - - def add_neumann_bc(self, marker: int, value: dolfinx.fem.Constant) -> None: - """Adds a Neumann BC. - - Args: - marker: The id of the boundary where Neumann BC should be applied. - value: The Neumann data, e.g. a traction vector. This has - to be a valid `ufl` object. 
- """ - if marker not in self._facet_tags.values: - raise ValueError(f"No facet tags defined for {marker=}.") - - self._neumann_bcs.append([value, marker]) - - @property - def has_neumann(self) -> bool: - """check if Neumann BCs are defined - - Returns: - True or False - """ - return len(self._neumann_bcs) > 0 - - @property - def has_dirichlet(self) -> bool: - """check if Dirichlet BCs are defined - - Returns: - True or False - """ - return len(self._bcs) > 0 - - @property - def bcs(self) -> list[dolfinx.fem.DirichletBCMetaClass]: - """returns the list of Dirichlet BCs - - Returns: - The list of Dirichlet BCs. - """ - - return self._bcs - - def clear(self, dirichlet: bool = True, neumann: bool = True) -> None: - """Clears list of Dirichlet and/or Neumann BCs. - - Args: - dirichlet: flag for Dirichlet Bcs (if true will clear those) - neumann: flag for Neumann Bcs (if true will clear those) - - """ - - if dirichlet: - self._bcs.clear() - if neumann: - self._neumann_bcs.clear() - - @property - def neumann_bcs(self) -> ufl.form.Form: - """creates the ufl form of (sum of) Neumann BCs - - Returns: - A ufl object representing Neumann BCs - """ - - r = 0 - for expression, marker in self._neumann_bcs: - r += ufl.inner(expression, self._v) * self._ds(marker) - return r diff --git a/src/fenicsxconcrete/boundary_conditions/boundary.py b/src/fenicsxconcrete/boundary_conditions/boundary.py deleted file mode 100644 index d2e826e..0000000 --- a/src/fenicsxconcrete/boundary_conditions/boundary.py +++ /dev/null @@ -1,271 +0,0 @@ -"""Easy definition of boundaries.""" - -import typing -from collections.abc import Callable - -import dolfinx -import numpy as np - -"""Design - -old dolfin: - here on needed a SubDomain object that defined the boundary geometrically. - SubDomain could then be passed to DirichletBC. - Therefore, fenics_helpers.boundary was used to conveniently define - boundaries geometrically (would return a SubDomain). - -dolfinx: - input to dirichletbc is now: - 1. (Function, array) - 2. ([Constant, array], array, FunctionSpace) - The array are the boundary_dofs which are determined via - `locate_dofs_topological` or `locate_dofs_geometrical`. - - Thus, multi.boundary could provide functions to: - (a) define callables that define complex geometry as input to - locate_dofs_geometrical. - (b) define functions that compute entities of the mesh and pass - this array to locate_dofs_topological. - - (b) might use dolfinx.mesh.locate_entities and - dolfinx.mesh.locate_entities_boundary - - Args: - mesh: dolfinx.mesh.Mesh - dim: tdim of the entities - marker: function that takes an array of points x and - returns an array of booleans - - --> therefore, use of locate_dofs_topological again boils down - to a geometrical description of the boundary to be defined. - The only difference is the possibility to filter wrt the tdim. - (this is not possible with locate_dofs_geometrical) - -""" - - -def plane_at(coordinate: float, dim: str | int) -> Callable: - """Defines a plane where `x[dim]` equals `coordinate`. - - Args: - coordinate: value - dim: dimension - - Returns: - function defining the boundary - """ - - if dim in ["x", "X"]: - dim = 0 - if dim in ["y", "Y"]: - dim = 1 - if dim in ["z", "Z"]: - dim = 2 - - assert dim in (0, 1, 2) - - def boundary(x): - return np.isclose(x[dim], coordinate) - - return boundary - - -def line_at(coordinates: list[float], dims: list[str | int]) -> Callable: - """Defines a line where `x[dims[0]]` equals `coordinates[0]` and `x[dims[1]]` equals `coordinates[1]`. 
- - Args: - coordinates: list of values - dims: list of dimension - - Returns: - function defining the boundary - """ - - assert len(coordinates) == 2 - assert len(dims) == 2 - - # transform x,y,z str into integer - for i, dim in enumerate(dims): - if dim in ["x", "X"]: - dims[i] = 0 - elif dim in ["y", "Y"]: - dims[i] = 1 - elif dim in ["z", "Z"]: - dims[i] = 2 - assert dims[i] in (0, 1, 2) - - assert dims[0] != dims[1] - - def boundary(x): - return np.logical_and( - np.isclose(x[dims[0]], coordinates[0]), - np.isclose(x[dims[1]], coordinates[1]), - ) - - return boundary - - -def within_range( - start: typing.Iterable[int] | typing.Iterable[float], - end: typing.Iterable[int] | typing.Iterable[float], - tol: float = 1e-6, -) -> Callable: - """Defines a range. - - It is best used together with `dolfinx.mesh.locate_entities_boundary` - and topological definition of the Dirichlet BC, because the Callable - will mark the whole range and not just the boundary. - - Args: - start: The start point of the range. - end: The end point of the range. - - Returns: - function defining the boundary - """ - start = to_floats(start) - end = to_floats(end) - - # adjust the values such that start < end for all dimensions - assert len(start) == 3 - assert len(start) == len(end) - for i in range(len(start)): - if start[i] > end[i]: - start[i], end[i] = end[i], start[i] - - def boundary(x): - def in_range(i): - return np.logical_and(x[i] >= start[i] - tol, x[i] <= end[i] + tol) - - xy = np.logical_and(in_range(0), in_range(1)) - return np.logical_and(xy, in_range(2)) - - return boundary - - -def point_at(coord: typing.Iterable[int] | typing.Iterable[float]) -> Callable: - """Defines a point. - - Args: - coord: points coordinates - - Returns: - function defining the boundary - """ - p = to_floats(coord) - - def boundary(x): - return np.logical_and( - np.logical_and(np.isclose(x[0], p[0]), np.isclose(x[1], p[1])), - np.isclose(x[2], p[2]), - ) - - return boundary - - -def _show_marked( - domain: dolfinx.mesh.Mesh, - marker: Callable, - filename: str | None = None, -) -> None: # pragma: no cover - """Shows dof coordinates marked by `marker`. - - Notes: - This is useful for debugging boundary conditions. - Currently this only works for domains of topological - dimension 2. - - Args: - domain: The computational domain. - marker: A function that takes an array of points ``x`` with shape - ``(gdim, num_points)`` and returns an array of booleans of - length ``num_points``, evaluating to ``True`` for entities whose - degree-of-freedom should be returned. - filename: Save figure to this path. - If None, the figure is shown (default). 
- """ - try: - import matplotlib.pyplot as plt - except ImportError: - raise ImportError("matplotlib is required to show marked dofs.") - - tdim = domain.topology.dim - if tdim in (1, 3): - raise NotImplementedError(f"Not implemented for mesh of topological dimension {tdim=}.") - - V = dolfinx.fem.FunctionSpace(domain, ("Lagrange", 1)) - dofs = dolfinx.fem.locate_dofs_geometrical(V, marker) - u = dolfinx.fem.Function(V) - bc = dolfinx.fem.dirichletbc(u, dofs) - x_dofs = V.tabulate_dof_coordinates() - x_dofs = x_dofs[:, :2] - marked = x_dofs[bc.dof_indices()[0]] - - plt.figure(1) - x, y = x_dofs.T - plt.scatter(x, y, facecolors="none", edgecolors="k", marker="o") - xx, yy = marked.T - plt.scatter(xx, yy, facecolors="r", edgecolors="none", marker="o") - - if filename is not None: - plt.savefig(filename) - else: - plt.show() - - -def to_floats(x: typing.Iterable[int] | typing.Iterable[float]) -> list[float]: - """Converts `x` to a 3d coordinate. - - Args: - x: point coordinates at least 1D - - Returns: - point described as list with x,y,z value - """ - - floats = [] - try: - for v in x: - floats.append(float(v)) - while len(floats) < 3: - floats.append(0.0) - except TypeError: - floats = [float(x), 0.0, 0.0] - - return floats - - -def create_facet_tags( - mesh: dolfinx.mesh.Mesh, boundaries: dict[str, tuple[int, Callable]] -) -> tuple[np.ndarray, dict[str, int]]: - """Creates facet tags for the given mesh and boundaries. - - This code is part of the FEniCSx tutorial - by Jørgen S. Dokken. - See https://jsdokken.com/dolfinx-tutorial/chapter3/robin_neumann_dirichlet.html?highlight=sorted_facets#implementation # noqa: E501 - - Args: - mesh: The computational domain. - boundaries: The definition of boundaries where each key is a string - and each value is a tuple of an integer and a marker function. - - Returns: - A tuple (facet_tags, marked_boundary) where facet_tags is an array - with dtype int and marked_boundary is a dict where each key is a string - and each value is an int. 
- """ - - facet_indices, facet_markers = [], [] - fdim = mesh.topology.dim - 1 - marked_boundary = {} - for key, (marker, locator) in boundaries.items(): - facets = dolfinx.mesh.locate_entities(mesh, fdim, locator) - facet_indices.append(facets) - facet_markers.append(np.full_like(facets, marker)) - if facets.size > 0: - marked_boundary[key] = marker - facet_indices = np.hstack(facet_indices).astype(np.int32) - facet_markers = np.hstack(facet_markers).astype(np.int32) - sorted_facets = np.argsort(facet_indices) - facet_tags = dolfinx.mesh.meshtags(mesh, fdim, facet_indices[sorted_facets], facet_markers[sorted_facets]) - return facet_tags, marked_boundary diff --git a/src/fenicsxconcrete/experiment.py b/src/fenicsxconcrete/experiment.py new file mode 100644 index 0000000..9bfc745 --- /dev/null +++ b/src/fenicsxconcrete/experiment.py @@ -0,0 +1,35 @@ +from pydantic import RootModel +from bcs import DirichletBCDefinition, InitialConditionDefinition, NeumannBCDefinition, BodyForceDefinition +from mesh import MeshGenerator +from material import MaterialDefinition, LinearElasticMaterial +from pydantic.dataclasses import dataclass + +@dataclass(config=dict(arbitrary_types_allowed=True)) +class Experiment: + dirichlet_bcs: list[DirichletBCDefinition] | None + neumann_bcs: list[NeumannBCDefinition] | None + initial_conditions: list[InitialConditionDefinition] | None + body_forces: list[BodyForceDefinition] | None + geometry: MeshGenerator + solution_fields: list[str] + time: tuple[float, float] + material: MaterialDefinition + name: str = "experiment" + + + + +if __name__ == "__main__": + bc = DirichletBCDefinition(marker=1, value=[0.,0.], subspace=0, variable='displacement') + neumann = NeumannBCDefinition(marker=2, value=[-42.0], variable='displacement') + initial = InitialConditionDefinition(value=[42.24], variable='density') + body_force = BodyForceDefinition(value=[0.,0., 9.81], variable='displacement') + mat = LinearElasticMaterial(name='steel', mu=1., lam=2.) + geo = MeshGenerator(parameters={'length': (1, 'm')}, mesh_tags={'left': 0, 'right': 1, 'top': 2, 'bottom': 3}) + solution_fields = ['displacement'] + time = (0., 1.) 
+ exp = Experiment(dirichlet_bcs=[bc], neumann_bcs=[neumann], initial_conditions=[initial], body_forces=[body_force], geometry=geo, solution_fields=solution_fields, time=time, material=mat) + + + print( RootModel[Experiment](exp).model_dump_json(indent=4)) + diff --git a/src/fenicsxconcrete/experimental_setup/__init__.py b/src/fenicsxconcrete/experimental_setup/__init__.py deleted file mode 100644 index 4dd540f..0000000 --- a/src/fenicsxconcrete/experimental_setup/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from .am_multiple_layers import AmMultipleLayers -from .base_experiment import Experiment -from .cantilever_beam import CantileverBeam -from .compression_cylinder import CompressionCylinder -from .simple_beam import SimpleBeam -from .simple_cube import SimpleCube -from .tensile_beam import TensileBeam diff --git a/src/fenicsxconcrete/experimental_setup/am_multiple_layers.py b/src/fenicsxconcrete/experimental_setup/am_multiple_layers.py deleted file mode 100644 index 3c0d439..0000000 --- a/src/fenicsxconcrete/experimental_setup/am_multiple_layers.py +++ /dev/null @@ -1,155 +0,0 @@ -from collections.abc import Callable - -import dolfinx as df -import numpy as np -import pint -import ufl -from mpi4py import MPI -from petsc4py.PETSc import ScalarType - -from fenicsxconcrete.boundary_conditions.bcs import BoundaryConditions -from fenicsxconcrete.boundary_conditions.boundary import plane_at -from fenicsxconcrete.experimental_setup.base_experiment import Experiment -from fenicsxconcrete.util import Parameters, QuadratureRule, ureg - - -class AmMultipleLayers(Experiment): - """sets up a simple layered structure - - all layers of the same height are on top of each other, the boundary on the bottom is fixed - the mesh includes all (activation via pseudo-density) - - Attributes: - parameters : parameter dictionary with units - p : parameter dictionary without units - - """ - - def __init__(self, parameters: dict[str, pint.Quantity]): - """initializes the object, for the rest, see base class - - Args: - parameters: dictionary containing the required parameters for the experiment set-up - see default_parameters for a first guess - - """ - - super().__init__(parameters) - - @staticmethod - def default_parameters() -> dict[str, pint.Quantity]: - """sets up a working set of parameter values as example - - Returns: - dictionary with a working set of the required parameter - - """ - - setup_parameters = {} - setup_parameters["degree"] = 2 * ureg("") # polynomial degree - # geometry - setup_parameters["dim"] = 2 * ureg("") - setup_parameters["num_layers"] = 10 * ureg("") # number of layers in y - setup_parameters["layer_length"] = 0.5 * ureg("m") # x_dimension - setup_parameters["layer_height"] = 0.01 * ureg("m") # Dy dimension - # only relevant for 3D case [z-dimension] - setup_parameters["layer_width"] = 0.05 * ureg("m") - - # mesh - setup_parameters["num_elements_layer_length"] = 10 * ureg("") - setup_parameters["num_elements_layer_height"] = 1 * ureg("") - # only relevant for 3D case - setup_parameters["num_elements_layer_width"] = 2 * ureg("") - - return setup_parameters - - def setup(self) -> None: - """defines the mesh for 2D and 3D - - Raises: - ValueError: if dimension (self.p["dim"]) is not 2 or 3 - """ - - self.logger.debug("setup mesh for %s", self.p["dim"]) - - if self.p["dim"] == 2: - self.mesh = df.mesh.create_rectangle( - comm=MPI.COMM_WORLD, - points=[(0.0, 0.0), (self.p["layer_length"], self.p["num_layers"] * self.p["layer_height"])], - n=(self.p["num_elements_layer_length"], 
self.p["num_layers"] * self.p["num_elements_layer_height"]), - cell_type=df.mesh.CellType.quadrilateral, - ) - elif self.p["dim"] == 3: - self.mesh = df.mesh.create_box( - comm=MPI.COMM_WORLD, - points=[ - (0.0, 0.0, 0.0), - (self.p["layer_length"], self.p["layer_width"], self.p["num_layers"] * self.p["layer_height"]), - ], - n=[ - self.p["num_elements_layer_length"], - self.p["num_elements_layer_width"], - self.p["num_layers"] * self.p["num_elements_layer_height"], - ], - cell_type=df.mesh.CellType.hexahedron, - ) - else: - raise ValueError(f'wrong dimension: {self.p["dim"]} is not implemented for problem setup') - - def create_displacement_boundary(self, V: df.fem.FunctionSpace) -> list[df.fem.bcs.DirichletBCMetaClass]: - """defines displacement boundary as fixed at bottom - - Args: - V: function space - - Returns: - list of dirichlet boundary conditions - - """ - - bc_generator = BoundaryConditions(self.mesh, V) - - if self.p["dim"] == 2: - # fix dofs at bottom - bc_generator.add_dirichlet_bc( - np.array([0.0, 0.0], dtype=ScalarType), - boundary=self.boundary_bottom(), - method="geometrical", - entity_dim=self.mesh.topology.dim - 1, # line - ) - - elif self.p["dim"] == 3: - # fix dofs at bottom - bc_generator.add_dirichlet_bc( - np.array([0.0, 0.0, 0.0], dtype=ScalarType), - boundary=self.boundary_bottom(), - method="geometrical", - entity_dim=self.mesh.topology.dim - 1, # surface - ) - - return bc_generator.bcs - - def create_body_force_am( - self, v: ufl.argument.Argument, q_fd: df.fem.Function, rule: QuadratureRule - ) -> ufl.form.Form: - """defines body force for am experiments - - element activation via pseudo density and incremental loading via parameter ["load_time"] computed in class concrete_am - - Args: - v: test function - q_fd: quadrature function given the loading increment where elements are active - rule: rule for the quadrature function - - Returns: - form for body force - - """ - - force_vector = np.zeros(self.p["dim"]) - force_vector[-1] = -self.p["rho"] * self.p["g"] # works for 2D and 3D - - f = df.fem.Constant(self.mesh, ScalarType(force_vector)) - L = q_fd * ufl.dot(f, v) * rule.dx - - return L diff --git a/src/fenicsxconcrete/experimental_setup/base_experiment.py b/src/fenicsxconcrete/experimental_setup/base_experiment.py deleted file mode 100644 index 5167603..0000000 --- a/src/fenicsxconcrete/experimental_setup/base_experiment.py +++ /dev/null @@ -1,213 +0,0 @@ -from abc import ABC, abstractmethod -from collections.abc import Callable - -import dolfinx as df -import pint -import ufl - -from fenicsxconcrete.boundary_conditions.boundary import plane_at, point_at -from fenicsxconcrete.util import LogMixin, Parameters, QuadratureRule, ureg - - -class Experiment(ABC, LogMixin): - """base class for experimental setups - - Attributes: - parameters : parameter dictionary with units - p : parameter dictionary without units - - """ - - def __init__(self, parameters: dict[str, pint.Quantity]) -> None: - """Initialises the parent object - - This is needs to be called by children - Constant parameters are defined here - - Args: - parameters: parameter dictionary with units - - """ - - # initialize parameter attributes - setup_parameters = Parameters() - - # setting up default setup parameters defined in each child - default_p = self.default_parameters() - setup_parameters.update(default_p) - # update with input parameters - setup_parameters.update(parameters) - - # get logger info which parameters are set to default values - # plus check dimensionality of input parameters - 
keys_set_default = [] - for key in dict(default_p): - if key not in parameters: - keys_set_default.append(key) - else: - # check if units are compatible - dim_given = parameters[key].dimensionality - dim_default = default_p[key].dimensionality - if dim_given != dim_default: - raise ValueError( - f"given units for {key} are not compatible with default units: {dim_given} != {dim_default}" - ) - self.logger.info(f"for the following parameters, the default values are used: {keys_set_default}") - - # as attribute - self.parameters = setup_parameters - - # remove units for use in fem model - self.p = self.parameters.to_magnitude() - - self.setup() - - @abstractmethod - def setup(self): - """Is called by init, must be defined by child""" - - @staticmethod - @abstractmethod - def default_parameters() -> dict[str, pint.Quantity]: - """sets up a working set of parameter values as example - - must be defined in each child - - Returns: - a dictionary with required parameters and a set of working values as example - - """ - - pass - - @abstractmethod - def create_displacement_boundary(self, V: df.fem.FunctionSpace) -> list[df.fem.bcs.DirichletBCMetaClass] | None: - """defines empty displacement boundary conditions (to be done in child) - - this function is abstract until there is a need for a material that does not need a displacement boundary - once that is required, just make this a normal function that returns an empty list - - Args: - V: function space - - Returns: - if defined a list with displacement boundary conditions otherwise None - - """ - - def create_force_boundary(self, v: ufl.argument.Argument | None = None) -> ufl.form.Form | None: - """defines empty force boundary (to be done in child) - - Args: - v: test function - - Returns: - if defined a form for the force otherwise None - - """ - - pass - - def create_body_force(self, v: ufl.argument.Argument | None = None) -> ufl.form.Form | None: - """defines empty body force function - - Args: - v: test function - - Returns: - if defined a form for the body force otherwise None - - """ - - pass - - def create_body_force_am( - self, - v: ufl.argument.Argument | None = None, - q_fd: df.fem.Function | None = None, - rule: QuadratureRule | None = None, - ) -> ufl.form.Form | None: - """defines empty body force function for am case - - Args: - v: test function - q_fd: quadrature function given the loading increment where elements are active - rule: rule for the quadrature function - - Returns: - if defined a form for the body force otherwise None - - """ - - pass - - def boundary_top(self) -> Callable: - """specifies boundary: plane at top - - Returns: - fct defining if dof is at boundary - - """ - if self.p["dim"] == 2: - return plane_at(self.p["height"], 1) - elif self.p["dim"] == 3: - return plane_at(self.p["height"], 2) - - def boundary_bottom(self) -> Callable: - """specifies boundary: plane at bottom - - Returns: fct defining if dof is at boundary - - """ - if self.p["dim"] == 2: - return plane_at(0.0, "y") - elif self.p["dim"] == 3: - return plane_at(0.0, "z") - - def boundary_left(self) -> Callable: - """specifies boundary: plane at left side - - Returns: - fct defining if dof is at boundary - - """ - if self.p["dim"] == 2: - return plane_at(0.0, "x") - elif self.p["dim"] == 3: - return plane_at(0.0, "x") - - def boundary_right(self) -> Callable: - """specifies boundary: plane at left side - - Returns: - fct defining if dof is at boundary - - """ - if self.p["dim"] == 2: - return plane_at(self.p["length"], "x") - elif self.p["dim"] == 3: - 
return plane_at(self.p["length"], "x") - - def boundary_front(self) -> Callable: - """specifies boundary: plane at front - - only for 3D case front plane - - Returns: - fct defining if dof is at boundary - - """ - if self.p["dim"] == 3: - return plane_at(0.0, "y") - - def boundary_back(self) -> Callable: - """specifies boundary: plane at front - - only for 3D case front plane - - Returns: - fct defining if dof is at boundary - - """ - if self.p["dim"] == 3: - return plane_at(self.p["width"], "y") diff --git a/src/fenicsxconcrete/experimental_setup/cantilever_beam.py b/src/fenicsxconcrete/experimental_setup/cantilever_beam.py deleted file mode 100644 index 7b06b38..0000000 --- a/src/fenicsxconcrete/experimental_setup/cantilever_beam.py +++ /dev/null @@ -1,129 +0,0 @@ -import dolfinx as df -import numpy as np -import pint -import ufl -from mpi4py import MPI -from petsc4py.PETSc import ScalarType - -from fenicsxconcrete.experimental_setup.base_experiment import Experiment -from fenicsxconcrete.util import Parameters, ureg - - -class CantileverBeam(Experiment): - """Sets up a cantilever beam, clamped on one side and loaded with gravity - - Attributes: - parameters : parameter dictionary with units - p : parameter dictionary without units - - """ - - def __init__(self, parameters: dict[str, pint.Quantity] | None = None): - """initializes the object, for the rest, see base class - - Args: - parameters: dictionary containing the required parameters for the experiment set-up - see default_parameters for a first guess - - """ - - super().__init__(parameters) - - def setup(self) -> None: - """defines the mesh for 2D or 3D - - Raises: - ValueError: if dimension (self.p["dim"]) is not 2 or 3 - """ - - if self.p["dim"] == 2: - self.mesh = df.mesh.create_rectangle( - comm=MPI.COMM_WORLD, - points=[(0.0, 0.0), (self.p["length"], self.p["height"])], - n=(self.p["num_elements_length"], self.p["num_elements_height"]), - cell_type=df.mesh.CellType.quadrilateral, - ) - elif self.p["dim"] == 3: - self.mesh = df.mesh.create_box( - comm=MPI.COMM_WORLD, - points=[ - (0.0, 0.0, 0.0), - (self.p["length"], self.p["width"], self.p["height"]), - ], - n=[ - self.p["num_elements_length"], - self.p["num_elements_width"], - self.p["num_elements_height"], - ], - cell_type=df.mesh.CellType.hexahedron, - ) - else: - raise ValueError(f'wrong dimension: {self.p["dim"]} is not implemented for problem setup') - - @staticmethod - def default_parameters() -> dict[str, pint.Quantity]: - """sets up a working set of parameter values as example - - Returns: - dictionary with a working set of the required parameter - - """ - - setup_parameters = {} - - setup_parameters["length"] = 1 * ureg("m") - setup_parameters["height"] = 0.3 * ureg("m") - setup_parameters["width"] = 0.3 * ureg("m") # only relevant for 3D case - setup_parameters["dim"] = 3 * ureg("") - setup_parameters["num_elements_length"] = 10 * ureg("") - setup_parameters["num_elements_height"] = 3 * ureg("") - setup_parameters["num_elements_width"] = 3 * ureg("") # only relevant for 3D case - - return setup_parameters - - def create_displacement_boundary(self, V) -> list: - """defines displacement boundary as fixed at bottom - - Args: - V: function space - - Returns: - list of dirichlet boundary conditions - - """ - - # fenics will individually call this function for every node and will note the true or false value. 
- def clamped_boundary(x): - return np.isclose(x[0], 0) - - displacement_bcs = [] - - zero = np.zeros(self.p["dim"]) - displacement_bcs.append( - df.fem.dirichletbc( - np.array(zero, dtype=ScalarType), - df.fem.locate_dofs_geometrical(V, clamped_boundary), - V, - ) - ) - - return displacement_bcs - - def create_body_force(self, v: ufl.argument.Argument) -> ufl.form.Form: - """defines body force - - Args: - v: test function - - Returns: - form for body force - - """ - - force_vector = np.zeros(self.p["dim"]) - force_vector[-1] = -self.p["rho"] * self.p["g"] # works for 2D and 3D - - f = df.fem.Constant(self.mesh, ScalarType(force_vector)) - L = ufl.dot(f, v) * ufl.dx - - return L diff --git a/src/fenicsxconcrete/experimental_setup/compression_cylinder.py b/src/fenicsxconcrete/experimental_setup/compression_cylinder.py deleted file mode 100644 index a58c2d2..0000000 --- a/src/fenicsxconcrete/experimental_setup/compression_cylinder.py +++ /dev/null @@ -1,257 +0,0 @@ -import tempfile -from collections.abc import Callable - -import dolfinx as df -import gmsh -import numpy as np -import pint -import ufl -from dolfinx.io import gmshio -from mpi4py import MPI - -from fenicsxconcrete import _GMSH_VERBOSITY -from fenicsxconcrete.boundary_conditions.bcs import BoundaryConditions -from fenicsxconcrete.boundary_conditions.boundary import plane_at, point_at -from fenicsxconcrete.experimental_setup.base_experiment import Experiment -from fenicsxconcrete.util import Parameters, ureg - - -def generate_cylinder_mesh(radius: float, height: float, mesh_density: float, element_degree: int = 2) -> df.mesh.Mesh: - """Uses gmsh to generate a cylinder mesh for fenics - - Args: - radius: radius of the cylinder - height: height of the cylinder - mesh_density: defines the size of the elements and the minimum number of element edges in the height of the cylinder - element_degree: degree of the discretization elements, quadratic geometry by default - - Returns: - cylinder mesh for dolfin - - """ - - # start gmsh - gmsh.initialize() - gmsh.option.setNumber("General.Verbosity", _GMSH_VERBOSITY) # only print warnings etc - gmsh.model.add("cylinder_mesh") # give the model a name - - # generate cylinder geometry with origin in (0,0,0) - # syntax: add_cylinder(x,y,z,dx,dy,dz,radius,angle in radian) - membrane = gmsh.model.occ.addCylinder(0, 0, 0, 0, 0, height, radius, angle=2 * np.pi) - gmsh.model.occ.synchronize() - gdim = 3 - # only physical groups get exported - # syntax: add_physical_group(dim , list of 3d objects, tag) - gmsh.model.addPhysicalGroup(gdim, [membrane], 1) - - # meshing - characteristic_length = height / mesh_density - gmsh.option.setNumber("Mesh.CharacteristicLengthMax", characteristic_length) - # setting for minimal length, arbitrarily chosen as half the max value - gmsh.option.setNumber("Mesh.CharacteristicLengthMin", characteristic_length / 2) - # setting the order of the elements - gmsh.option.setNumber("Mesh.ElementOrder", element_degree) - gmsh.model.mesh.setOrder(element_degree) - gmsh.model.mesh.generate(gdim) - - # write to tmp file - msh_file = tempfile.NamedTemporaryFile(suffix=".msh") - gmsh.write(msh_file.name) - gmsh.finalize() - - # reads in the mesh on a single process - # and then distributes the cells over available ranks - # returns mesh, cell_tags, facet_tags - mesh, _, _ = gmshio.read_from_msh(msh_file.name, MPI.COMM_WORLD, gdim=gdim) - - # tmp file is deleted when closed - msh_file.close() - - return mesh - - -class CompressionCylinder(Experiment): - """A cylinder mesh for a uni-axial 
displacement load - - Attributes: - parameters : parameter dictionary with units - p : parameter dictionary without units - - """ - - def __init__(self, parameters: dict[str, pint.Quantity] | None = None) -> None: - """initializes the object, for the rest, see base class - - Standard parameters are set - setup function called - - Args: - parameters: dictionary with parameters that can override the default values - - """ - - super().__init__(parameters) - - # initialize variable top_displacement - self.top_displacement = df.fem.Constant(domain=self.mesh, c=0.0) # applied via fkt: apply_displ_load(...) - - def setup(self) -> None: - """Generates the mesh based on parameters - - This function is called during __init__ - - Raises: - ValueError: if dimension (self.p["dim"]) is not 2 or 3 - - """ - - if self.p["dim"] == 2: - # build a rectangular mesh to approximate a 2D cylinder - self.mesh = df.mesh.create_rectangle( - MPI.COMM_WORLD, - [ - [0.0, 0.0], - [self.p["radius"] * 2, self.p["height"]], - ], - [self.p["mesh_density"], self.p["mesh_density"]], - cell_type=df.mesh.CellType.triangle, - ) - elif self.p["dim"] == 3: - # generates a 3D cylinder mesh based on radius and height - # to reduce approximation errors due to the linear tetrahedron mesh, the mesh radius is iteratively changed - # until the bottom surface area matches that of a circle with the initially defined radius - def create_cylinder_mesh(radius, p): - # generate cylinder mesh using gmsh - mesh = generate_cylinder_mesh(radius, p["height"], p["mesh_density"], p["element_order"]) - facets = df.mesh.locate_entities_boundary(mesh, 2, plane_at(0.0, 2)) - tdim = mesh.topology.dim - f_v = mesh.topology.connectivity(tdim - 1, 0).array.reshape(-1, 3) - entities = df.graph.create_adjacencylist(f_v[facets]) - values = np.full(facets.shape[0], 2, dtype=np.int32) - - ft = df.mesh.meshtags_from_entities(mesh, tdim - 1, entities, values) - ds = ufl.Measure("ds", domain=mesh, subdomain_data=ft) - bottom_area = df.fem.assemble_scalar(df.fem.form(1 * ds(2))) - - return bottom_area, mesh - - if self.p["degree"] == 1: - # create a discretized cylinder mesh with the same cross-sectional area as the round cylinder - target_area = np.pi * self.p["radius"] ** 2 - effective_radius = self.p["radius"] - mesh_area = 0 - area_error = 1e-6 - # - # iteratively improve the radius of the mesh till the bottom area matches the target - while abs(target_area - mesh_area) > target_area * area_error: - # generate mesh - self.p["mesh_radius"] = effective_radius # not required, but maybe interesting as metadata - mesh_area, self.mesh = create_cylinder_mesh(effective_radius, self.p) - # new guess - effective_radius = np.sqrt(target_area / mesh_area) * effective_radius - else: - mesh_area, self.mesh = create_cylinder_mesh(self.p["radius"], self.p) - else: - raise ValueError(f"wrong dimension {self.p['dim']} for problem setup") - - @staticmethod - def default_parameters() -> dict[str, pint.Quantity]: - """sets up a working set of parameter values as example - - Returns: - dictionary with a working set of the required parameter - - """ - - default_parameters = {} - default_parameters["element_order"] = 2 * ureg("") # polynomial degree - - # boundary setting - default_parameters["bc_setting"] = "free" * ureg("") # boundary setting, two options available: fixed and free - # fixed: constrained at top and bottom in transversal to loading - # free: no confinement perpendicular to loading surface - - # mesh information - default_parameters["dim"] = 3 * ureg("") # dimension of 
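To make the radius correction in the 3D branch above concrete, here is a numpy-only sketch of the same fixed-point update (r_new = sqrt(target_area / mesh_area) * r); a regular polygon stands in for the faceted gmsh cross-section, which is purely an assumption for illustration:

import numpy as np

def polygon_area(radius: float, n_sides: int = 8) -> float:
    # area of a regular n-gon inscribed in a circle of the given radius
    return 0.5 * n_sides * radius**2 * np.sin(2 * np.pi / n_sides)

target_radius = 75.0                     # mm, as in default_parameters below
target_area = np.pi * target_radius**2
effective_radius = target_radius
mesh_area = polygon_area(effective_radius)
while abs(target_area - mesh_area) > 1e-6 * target_area:
    effective_radius = np.sqrt(target_area / mesh_area) * effective_radius
    mesh_area = polygon_area(effective_radius)
print(effective_radius)                  # slightly larger than 75, so the n-gon area matches the circle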
problem, 2D or 3D - # 2D version of the cylinder is a rectangle with plane strain assumption - default_parameters["mesh_density"] = 4 * ureg( - "" - ) # in 3D: number of faces on the side when generating a polyhedral approximation - # in 2D: number of elements in each direction - default_parameters["radius"] = 75 * ureg("mm") # radius of cylinder to approximate in mm - default_parameters["height"] = 100 * ureg("mm") # height of cylinder in mm - default_parameters["degree"] = 2 * ureg("") # polynomial degree of the mesh - - return default_parameters - - def create_displacement_boundary(self, V: df.fem.FunctionSpace) -> list[df.fem.bcs.DirichletBCMetaClass]: - """Defines the displacement boundary conditions - - Args: - V: Function space of the structure - - Returns: - list of DirichletBC objects, defining the boundary conditions - - """ - - # define boundary conditions generator - bc_generator = BoundaryConditions(self.mesh, V) - - if self.p["bc_setting"] == "fixed": - if self.p["dim"] == 2: - bc_generator.add_dirichlet_bc(self.top_displacement, self.boundary_top(), 1, "geometrical", 1) - bc_generator.add_dirichlet_bc(np.float64(0.0), self.boundary_top(), 0, "geometrical", 0) - bc_generator.add_dirichlet_bc( - df.fem.Constant(domain=self.mesh, c=(0.0, 0.0)), - self.boundary_bottom(), - None, - "geometrical", - ) - elif self.p["dim"] == 3: - bc_generator.add_dirichlet_bc(self.top_displacement, self.boundary_top(), 2, "geometrical", 2) - bc_generator.add_dirichlet_bc(np.float64(0.0), self.boundary_top(), 0, "geometrical", 0) - bc_generator.add_dirichlet_bc(np.float64(0.0), self.boundary_top(), 1, "geometrical", 1) - bc_generator.add_dirichlet_bc( - df.fem.Constant(domain=self.mesh, c=(0.0, 0.0, 0.0)), - self.boundary_bottom(), - None, - "geometrical", - ) - - elif self.p["bc_setting"] == "free": - if self.p["dim"] == 2: - bc_generator.add_dirichlet_bc(self.top_displacement, self.boundary_top(), 1, "geometrical", 1) - bc_generator.add_dirichlet_bc(np.float64(0.0), self.boundary_bottom(), 1, "geometrical", 1) - bc_generator.add_dirichlet_bc(np.float64(0.0), point_at((0, 0)), 0, "geometrical", 0) - - elif self.p["dim"] == 3: - # getting nodes at the bottom of the mesh to apply correct boundary condition to arbitrary cylinder mesh - mesh_points = self.mesh.geometry.x # list of all nodal coordinates - bottom_points = mesh_points[(mesh_points[:, 2] == 0.0)] # copying the bottom nodes, z coord = 0.0 - - # sorting by x coordinate - x_min_boundary_point = bottom_points[bottom_points[:, 0].argsort(kind="mergesort")][0] - x_max_boundary_point = bottom_points[bottom_points[:, 0].argsort(kind="mergesort")][-1] - # sorting by y coordinate - y_boundary_point = bottom_points[bottom_points[:, 1].argsort(kind="mergesort")][0] - - bc_generator.add_dirichlet_bc(self.top_displacement, self.boundary_top(), 2, "geometrical", 2) - bc_generator.add_dirichlet_bc(np.float64(0.0), self.boundary_bottom(), 2, "geometrical", 2) - bc_generator.add_dirichlet_bc(np.float64(0.0), point_at(x_min_boundary_point), 1, "geometrical", 1) - bc_generator.add_dirichlet_bc(np.float64(0.0), point_at(x_max_boundary_point), 1, "geometrical", 1) - bc_generator.add_dirichlet_bc(np.float64(0.0), point_at(y_boundary_point), 0, "geometrical", 0) - else: - raise ValueError(f"Wrong boundary setting: {self.p['bc_setting']}, for cylinder setup") - - return bc_generator.bcs - - def apply_displ_load(self, top_displacement: pint.Quantity | float) -> None: - """Updates the applied displacement load - - Args: - top_displacement: Displacement of the top 
boundary in mm, > 0 ; tension, < 0 ; compression - - """ - top_displacement.ito_base_units() - self.top_displacement.value = top_displacement.magnitude diff --git a/src/fenicsxconcrete/experimental_setup/simple_beam.py b/src/fenicsxconcrete/experimental_setup/simple_beam.py deleted file mode 100644 index edfecdb..0000000 --- a/src/fenicsxconcrete/experimental_setup/simple_beam.py +++ /dev/null @@ -1,206 +0,0 @@ -from collections.abc import Callable - -import dolfinx as df -import numpy as np -import pint -import ufl -from mpi4py import MPI -from petsc4py.PETSc import ScalarType - -from fenicsxconcrete.boundary_conditions.bcs import BoundaryConditions -from fenicsxconcrete.boundary_conditions.boundary import line_at, point_at -from fenicsxconcrete.experimental_setup.base_experiment import Experiment -from fenicsxconcrete.util import Parameters, ureg - - -class SimpleBeam(Experiment): - """Sets up a simply supported beam, fix on the left - - Attributes: - parameters : parameter dictionary with units - p : parameter dictionary without units - - """ - - def __init__(self, parameters: dict[str, pint.Quantity]) -> None: - """initializes the object, for the rest, see base class - - Args: - parameters: dictionary containing the required parameters for the experiment set-up - see default_parameters for a first guess - - """ - - super().__init__(parameters) - - def setup(self): - """defines the mesh for 2D or 3D - - Raises: - ValueError: if dimension (self.p["dim"]) is not 2 or 3 - """ - - if self.p["dim"] == 2: - self.mesh = df.mesh.create_rectangle( - comm=MPI.COMM_WORLD, - points=[(0.0, 0.0), (self.p["length"], self.p["height"])], - n=(self.p["num_elements_length"], self.p["num_elements_height"]), - cell_type=df.mesh.CellType.quadrilateral, - ) - elif self.p["dim"] == 3: - self.mesh = df.mesh.create_box( - comm=MPI.COMM_WORLD, - points=[ - (0.0, 0.0, 0.0), - (self.p["length"], self.p["width"], self.p["height"]), - ], - n=[ - self.p["num_elements_length"], - self.p["num_elements_width"], - self.p["num_elements_height"], - ], - cell_type=df.mesh.CellType.hexahedron, - ) - else: - raise ValueError(f'wrong dimension: {self.p["dim"]} is not implemented for problem setup') - - @staticmethod - def default_parameters() -> dict[str, pint.Quantity]: - """sets up a working set of parameter values as example - - Returns: - dictionary with a working set of the required parameter - - """ - - setup_parameters = {} - - setup_parameters["load"] = 10000 * ureg("N/m^2") - setup_parameters["length"] = 1 * ureg("m") - setup_parameters["height"] = 0.3 * ureg("m") - setup_parameters["width"] = 0.3 * ureg("m") # only relevant for 3D case - setup_parameters["dim"] = 3 * ureg("") - setup_parameters["num_elements_length"] = 10 * ureg("") - setup_parameters["num_elements_height"] = 3 * ureg("") - # only relevant for 3D case - setup_parameters["num_elements_width"] = 3 * ureg("") - - return setup_parameters - - def create_displacement_boundary(self, V) -> list: - """defines displacement boundary as fixed at bottom - - Args: - V: function space - - Returns: - list of dirichlet boundary conditions - - """ - - bc_generator = BoundaryConditions(self.mesh, V) - - if self.p["dim"] == 2: - # fix line in the left - bc_generator.add_dirichlet_bc( - np.array([0.0, 0.0], dtype=ScalarType), - boundary=self.boundary_left(), - method="geometrical", - ) - # line with dof in x direction on the right - bc_generator.add_dirichlet_bc(np.float64(0.0), self.boundary_right(), 1, "geometrical", 0) - - elif self.p["dim"] == 3: - # fix line in the 
left - bc_generator.add_dirichlet_bc( - np.array([0.0, 0.0, 0.0], dtype=ScalarType), - boundary=self.boundary_left(), - method="geometrical", - ) - # line with dof in x direction on the right - bc_generator.add_dirichlet_bc(np.float64(0.0), self.boundary_right(), 1, "geometrical", 0) - bc_generator.add_dirichlet_bc(np.float64(0.0), self.boundary_right(), 2, "geometrical", 0) - - return bc_generator.bcs - - def create_body_force(self, v: ufl.argument.Argument) -> ufl.form.Form: - """defines body force - - Args: - v: test function - - Returns: - form for body force - - """ - - force_vector = np.zeros(self.p["dim"]) - force_vector[-1] = -self.p["rho"] * self.p["g"] # works for 2D and 3D - - f = df.fem.Constant(self.mesh, ScalarType(force_vector)) - L = ufl.dot(f, v) * ufl.dx - - return L - - def boundary_left(self) -> Callable: - """specifies boundary at bottom - - Returns: - fct defining boundary - - """ - - if self.p["dim"] == 2: - return point_at([0, 0]) - elif self.p["dim"] == 3: - return line_at([0, 0], ["x", "z"]) - - def boundary_right(self) -> Callable: - """specifies boundary at bottom - - Returns: - fct defining boundary - - """ - - if self.p["dim"] == 2: - return point_at([self.p["length"], 0]) - elif self.p["dim"] == 3: - return line_at([self.p["length"], 0], ["x", "z"]) - - def create_force_boundary(self, v: ufl.argument.Argument) -> ufl.form.Form: - """distributed load on top of beam - - Args: - v: test function - - Returns: - form for force boundary - - """ - - # TODO: make this more pretty!!! - # can we use Philipps boundary classes here? - - facet_indices, facet_markers = [], [] - fdim = self.mesh.topology.dim - 1 - - def locator(x): - return np.isclose(x[fdim], self.p["height"]) - - facets = df.mesh.locate_entities(self.mesh, fdim, locator) - facet_indices.append(facets) - facet_markers.append(np.full_like(facets, 1)) - facet_indices = np.hstack(facet_indices).astype(np.int32) - facet_markers = np.hstack(facet_markers).astype(np.int32) - sorted_facets = np.argsort(facet_indices) - facet_tag = df.mesh.meshtags(self.mesh, fdim, facet_indices[sorted_facets], facet_markers[sorted_facets]) - - _ds = ufl.Measure("ds", domain=self.mesh, subdomain_data=facet_tag) - - force_vector = np.zeros(self.p["dim"]) - force_vector[-1] = -self.p["load"] - T = df.fem.Constant(self.mesh, ScalarType(force_vector)) - L = ufl.dot(T, v) * _ds(1) - - return L diff --git a/src/fenicsxconcrete/experimental_setup/simple_cube.py b/src/fenicsxconcrete/experimental_setup/simple_cube.py deleted file mode 100644 index 33e0fca..0000000 --- a/src/fenicsxconcrete/experimental_setup/simple_cube.py +++ /dev/null @@ -1,248 +0,0 @@ -from collections.abc import Callable - -import dolfinx as df -import numpy as np -import pint -import ufl -from mpi4py import MPI -from petsc4py.PETSc import ScalarType - -from fenicsxconcrete.boundary_conditions.bcs import BoundaryConditions -from fenicsxconcrete.experimental_setup.base_experiment import Experiment -from fenicsxconcrete.util import LogMixin, Parameters, ureg - - -class SimpleCube(Experiment): - """sets up an uniaxial cube structure with displacement load - - 2D unit square or 3D unit cube with uniaxial boundary conditions - displacement controlled - for material model testing - - Attributes: - parameters: parameter dictionary with units - p: parameter dictionary without units - - """ - - def __init__(self, parameters: dict[str, pint.Quantity] | None = None) -> None: - """initializes the object, for the rest, see base class - - Args: - parameters: dictionary containing 
the required parameters for the experiment set-up - see default_parameters for a first guess - """ - - super().__init__(parameters) - - @staticmethod - def default_parameters() -> dict[str, pint.Quantity]: - """sets up a working set of parameter values as example - - Returns: - dictionary with a working set of the required parameter - - """ - - setup_parameters = {} - - setup_parameters["height"] = 1 * ureg("m") - setup_parameters["width"] = 1 * ureg("m") - setup_parameters["length"] = 1 * ureg("m") - setup_parameters["T_0"] = ureg.Quantity(20.0, ureg.degC) - setup_parameters["T_bc"] = ureg.Quantity(20.0, ureg.degC) - setup_parameters["dim"] = 3 * ureg("") - setup_parameters["num_elements_length"] = 2 * ureg("") - setup_parameters["num_elements_width"] = 2 * ureg("") - setup_parameters["num_elements_height"] = 2 * ureg("") - setup_parameters["strain_state"] = "uniaxial" * ureg("") - - return setup_parameters - - def setup(self) -> None: - """Generates the mesh in 2D or 3D based on parameters - - Raises: - ValueError: if dimension (self.p["dim"]) is not 2 or 3 - """ - - self.logger.debug("setup mesh for %s", self.p["dim"]) - - if self.p["dim"] == 2: - # build a rectangular mesh - self.mesh = df.mesh.create_rectangle( - MPI.COMM_WORLD, - [ - [0.0, 0.0], - [self.p["length"], self.p["height"]], - ], - [self.p["num_elements_length"], self.p["num_elements_height"]], - cell_type=df.mesh.CellType.quadrilateral, - ) - elif self.p["dim"] == 3: - self.mesh = df.mesh.create_box( - MPI.COMM_WORLD, - [ - [0.0, 0.0, 0.0], - [self.p["length"], self.p["width"], self.p["height"]], - ], - [self.p["num_elements_length"], self.p["num_elements_width"], self.p["num_elements_height"]], - cell_type=df.mesh.CellType.hexahedron, - ) - - else: - raise ValueError(f"wrong dimension {self.p['dim']} for problem setup") - - # initialize variable top_displacement - self.top_displacement = df.fem.Constant(domain=self.mesh, c=0.0) # applied via fkt: apply_displ_load(...) 
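The displacement handle initialised above is later updated via apply_displ_load, which relies on pint to normalise the user's units before writing into the dolfinx Constant; a standalone pint sketch of that conversion (variable names are illustrative):

import pint

ureg = pint.UnitRegistry()
top_displacement = -0.5 * ureg("mm")   # negative -> compression, as in the docstrings
top_displacement.ito_base_units()      # in-place conversion to SI base units
print(top_displacement.magnitude)      # -0.0005, the value assigned to the df.fem.Constant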
- self.use_body_force = False - self.temperature_bc = df.fem.Constant(domain=self.mesh, c=self.p["T_bc"]) - - def create_displacement_boundary(self, V: df.fem.FunctionSpace) -> list[df.fem.bcs.DirichletBCMetaClass]: - """Defines the displacement boundary conditions - - Args: - V :Function space of the structure - - Returns: - list of DirichletBC objects, defining the boundary conditions - """ - - # define boundary conditions generator - bc_generator = BoundaryConditions(self.mesh, V) - - if self.p["dim"] == 2: - # uniaxial bcs - bc_generator.add_dirichlet_bc( - np.float64(0.0), boundary=self.boundary_bottom(), sub=1, method="geometrical", entity_dim=1 - ) - bc_generator.add_dirichlet_bc( - np.float64(0.0), boundary=self.boundary_left(), sub=0, method="geometrical", entity_dim=1 - ) - - if self.p["strain_state"] == "uniaxial": - # displacement controlled - bc_generator.add_dirichlet_bc( - self.top_displacement, boundary=self.boundary_top(), sub=1, method="geometrical", entity_dim=1 - ) - elif self.p["strain_state"] == "multiaxial": - # displacement controlled - bc_generator.add_dirichlet_bc( - self.top_displacement, boundary=self.boundary_top(), sub=1, method="geometrical", entity_dim=1 - ) - bc_generator.add_dirichlet_bc( - self.top_displacement, boundary=self.boundary_right(), sub=0, method="geometrical", entity_dim=1 - ) - else: - raise ValueError(f'Strain_state value: {self.p["strain_state"]} is not implemented in 2D.') - - elif self.p["dim"] == 3: - # uniaxial bcs - bc_generator.add_dirichlet_bc( - np.float64(0.0), boundary=self.boundary_bottom(), sub=2, method="geometrical", entity_dim=2 - ) - bc_generator.add_dirichlet_bc( - np.float64(0.0), boundary=self.boundary_left(), sub=0, method="geometrical", entity_dim=2 - ) - bc_generator.add_dirichlet_bc( - np.float64(0.0), boundary=self.boundary_front(), sub=1, method="geometrical", entity_dim=2 - ) - - # displacement controlled - if self.p["strain_state"] == "uniaxial": - bc_generator.add_dirichlet_bc( - self.top_displacement, boundary=self.boundary_top(), sub=2, method="geometrical", entity_dim=2 - ) - elif self.p["strain_state"] == "multiaxial": - bc_generator.add_dirichlet_bc( - self.top_displacement, boundary=self.boundary_top(), sub=2, method="geometrical", entity_dim=2 - ) - bc_generator.add_dirichlet_bc( - self.top_displacement, boundary=self.boundary_right(), sub=0, method="geometrical", entity_dim=2 - ) - bc_generator.add_dirichlet_bc( - self.top_displacement, boundary=self.boundary_back(), sub=1, method="geometrical", entity_dim=2 - ) - else: - raise ValueError(f'Strain_state value: {self.p["strain_state"]} is not implemented in 3D.') - - return bc_generator.bcs - - def apply_displ_load(self, top_displacement: pint.Quantity | float) -> None: - """Updates the applied displacement load - - Args: - top_displacement: Displacement of the top boundary in mm, > 0 ; tension, < 0 ; compression - - """ - top_displacement.ito_base_units() - self.top_displacement.value = top_displacement.magnitude - - def apply_temp_bc(self, T_bc: pint.Quantity | float) -> None: - """Updates the applied temperature boundary condition - - Args: - T_bc1: Temperature of the top boundary in degree Celsius - - """ - T_bc.ito_base_units() - self.temperature_bc.value = T_bc.magnitude - self.p["T_bc"] = T_bc.magnitude - - def apply_body_force(self) -> None: - self.use_body_force = True - - def create_temperature_bcs(self, V: df.fem.FunctionSpace) -> list[df.fem.bcs.DirichletBCMetaClass]: - """defines empty temperature boundary conditions (to be done in child) - - 
this function is abstract until there is a need for a material that does need a temperature boundary - once that is required, just make this a normal function that returns an empty list - - Args: - V: function space - - Returns: - a list with temperature boundary conditions - - """ - - def full_boundary(x): - if self.p["dim"] == 2: - return ( - self.boundary_bottom()(x) - | self.boundary_left()(x) - | self.boundary_right()(x) - | self.boundary_top()(x) - ) - elif self.p["dim"] == 3: - return ( - self.boundary_back()(x) - | self.boundary_bottom()(x) - | self.boundary_front()(x) - | self.boundary_left()(x) - | self.boundary_right()(x) - | self.boundary_top()(x) - ) - - bc_generator = BoundaryConditions(self.mesh, V) - bc_generator.add_dirichlet_bc( - self.temperature_bc, - boundary=full_boundary, - method="geometrical", - entity_dim=self.mesh.topology.dim - 1, - ) - return bc_generator.bcs - - def create_body_force(self, v: ufl.argument.Argument) -> ufl.form.Form | None: - # TODO: The sign of the body force is not clear. - - if self.use_body_force: - force_vector = np.zeros(self.p["dim"]) - force_vector[-1] = self.p["rho"] * self.p["g"] # works for 2D and 3D - - f = df.fem.Constant(self.mesh, force_vector) - L = ufl.dot(f, v) * ufl.dx - - return L - else: - return None diff --git a/src/fenicsxconcrete/experimental_setup/tensile_beam.py b/src/fenicsxconcrete/experimental_setup/tensile_beam.py deleted file mode 100644 index 1567ef3..0000000 --- a/src/fenicsxconcrete/experimental_setup/tensile_beam.py +++ /dev/null @@ -1,148 +0,0 @@ -import dolfinx as df -import numpy as np -import pint -import ufl -from mpi4py import MPI -from petsc4py.PETSc import ScalarType - -from fenicsxconcrete.experimental_setup.base_experiment import Experiment -from fenicsxconcrete.util import Parameters, ureg - - -class TensileBeam(Experiment): - """Sets up a tensile beam experiment, clamped on one side and loaded with force on the other side - - Attributes: - parameters : parameter dictionary with units - p : parameter dictionary without units - - """ - - def __init__(self, parameters: dict[str, pint.Quantity] | None = None) -> None: - """initializes the object, for the rest, see base class - - Args: - parameters: dictionary containing the required parameters for the experiment set-up - see default_parameters for a first guess - - """ - - super().__init__(parameters) - - def setup(self) -> None: - """defines the mesh for 2D or 3D - - Raises: - ValueError: if dimension (self.p["dim"]) is not 2 or 3 - """ - - if self.p["dim"] == 2: - self.mesh = df.mesh.create_rectangle( - comm=MPI.COMM_WORLD, - points=[(0.0, 0.0), (self.p["length"], self.p["height"])], - n=(self.p["num_elements_length"], self.p["num_elements_height"]), - cell_type=df.mesh.CellType.quadrilateral, - ) - elif self.p["dim"] == 3: - self.mesh = df.mesh.create_box( - comm=MPI.COMM_WORLD, - points=[ - (0.0, 0.0, 0.0), - (self.p["length"], self.p["width"], self.p["height"]), - ], - n=[ - self.p["num_elements_length"], - self.p["num_elements_width"], - self.p["num_elements_height"], - ], - cell_type=df.mesh.CellType.hexahedron, - ) - else: - raise ValueError(f'wrong dimension: {self.p["dim"]} is not implemented for problem setup') - - @staticmethod - def default_parameters() -> dict[str, pint.Quantity]: - """sets up a working set of parameter values as example - - Returns: - dictionary with a working set of the required parameter - - """ - - setup_parameters = {} - - setup_parameters["length"] = 1 * ureg("m") - setup_parameters["height"] = 0.3 * ureg("m") - 
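Aside: the full_boundary helper above works because each boundary callable returns a boolean array per batch of points, so the bitwise | yields the union of all faces. A simplified numpy-only sketch of that mechanism (this plane_at is a stripped-down stand-in for the real one):

import numpy as np

def plane_at(coordinate: float, axis: int):
    # simplified marker: True where the given coordinate component matches
    return lambda x: np.isclose(x[axis], coordinate)

bottom = plane_at(0.0, 1)   # y == 0
left = plane_at(0.0, 0)     # x == 0

# three points of a unit square, shape (gdim, n_points) as the marker receives them
x = np.array([[0.0, 0.5, 1.0],
              [0.0, 0.0, 1.0]])
print(bottom(x) | left(x))  # -> [ True  True False]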
setup_parameters["width"] = 0.3 * ureg("m") # only relevant for 3D case - setup_parameters["dim"] = 3 * ureg("") - setup_parameters["num_elements_length"] = 10 * ureg("") - setup_parameters["num_elements_height"] = 3 * ureg("") - setup_parameters["num_elements_width"] = 3 * ureg("") # only relevant for 3D case - setup_parameters["load"] = 2000 * ureg("kN") - - return setup_parameters - - def create_displacement_boundary(self, V) -> list: - """Defines the displacement boundary conditions - - Args: - V: Function space of the structure - - Returns: - list of DirichletBC objects, defining the boundary conditions - - """ - - # fenics will individually call this function for every node and will note the true or false value. - def clamped_boundary(x): - return np.isclose(x[0], 0) - - displacement_bcs = [] - - zero = np.zeros(self.p["dim"]) - displacement_bcs.append( - df.fem.dirichletbc( - np.array(zero, dtype=ScalarType), - df.fem.locate_dofs_geometrical(V, clamped_boundary), - V, - ) - ) - - return displacement_bcs - - def create_force_boundary(self, v: ufl.argument.Argument) -> ufl.form.Form: - """distributed load on top of beam - - Args: - v: test function - - Returns: - form for force boundary - - """ - - boundaries = [ - (1, lambda x: np.isclose(x[0], self.p["length"])), - (2, lambda x: np.isclose(x[0], 0)), - ] - - facet_indices, facet_markers = [], [] - fdim = self.mesh.topology.dim - 1 - for marker, locator in boundaries: - facets = df.mesh.locate_entities(self.mesh, fdim, locator) - facet_indices.append(facets) - facet_markers.append(np.full_like(facets, marker)) - facet_indices = np.hstack(facet_indices).astype(np.int32) - facet_markers = np.hstack(facet_markers).astype(np.int32) - sorted_facets = np.argsort(facet_indices) - facet_tag = df.mesh.meshtags(self.mesh, fdim, facet_indices[sorted_facets], facet_markers[sorted_facets]) - - _ds = ufl.Measure("ds", domain=self.mesh, subdomain_data=facet_tag) - - force_vector = np.zeros(self.p["dim"]) - force_vector[0] = self.p["load"] - - T = df.fem.Constant(self.mesh, ScalarType(force_vector)) - L = ufl.dot(T, v) * _ds(1) - - return L diff --git a/src/fenicsxconcrete/finite_element_problem/__init__.py b/src/fenicsxconcrete/finite_element_problem/__init__.py deleted file mode 100644 index 5074ea4..0000000 --- a/src/fenicsxconcrete/finite_element_problem/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .base_material import MaterialProblem, QuadratureFields, SolutionFields -from .concrete_am import ConcreteAM, ConcreteThixElasticModel -from .concrete_thermo_mechanical import ConcreteThermoMechanical -from .linear_elasticity import LinearElasticity diff --git a/src/fenicsxconcrete/finite_element_problem/base_material.py b/src/fenicsxconcrete/finite_element_problem/base_material.py deleted file mode 100644 index 7a2f9b7..0000000 --- a/src/fenicsxconcrete/finite_element_problem/base_material.py +++ /dev/null @@ -1,271 +0,0 @@ -from __future__ import annotations - -import importlib -import json -from abc import ABC, abstractmethod -from copy import deepcopy -from dataclasses import dataclass -from pathlib import Path, PosixPath - -import dolfinx as df -import jsonschema -import pint -import ufl - -from fenicsxconcrete.experimental_setup.base_experiment import Experiment -from fenicsxconcrete.sensor_definition.base_sensor import BaseSensor -from fenicsxconcrete.sensor_definition.sensor_schema import generate_sensor_schema -from fenicsxconcrete.util import LogMixin, Parameters, ureg - - -@dataclass -class SolutionFields: - """ - A dataclass to hold the 
solution fields of the problem. - The list of names should be extendend when needed. - - Examples: - Since this is a dataclass, the __init__ method is automatically - generated and can be used to selectively set fields. All fields that - are not explicitely set are set to their default value (here None). - - >>> fields = SolutionFields(displacement=some_function, temperature=some_other_function) - """ - - displacement: df.fem.Function | None = None - velocity: df.fem.Function | None = None - temperature: df.fem.Function | None = None - nonlocal_strain: df.fem.Function | None = None - - -@dataclass -class QuadratureFields: - """ - A dataclass to hold the quadrature fields (or ufl expressions) - of the problem, at least those that we want to plot in paraview. - Additionally, the measure for the integration and the type of function - space is stored. The list of names should be extendend when needed. - - Examples: - Since this is a dataclass, the __init__ method is automatically - generated and can be used to selectively set fields. All fields that - are not explicitely set are set to their default value (here None). - - >>> q_fields = QuadratureFields(measure=rule.dx, plot_space_type=("Lagrange", 4), stress=some_function) - """ - - measure: ufl.Measure | None = None - plot_space_type: tuple[str, int] = ("DG", 0) - mandel_stress: ufl.core.expr.Expr | df.fem.Function | None = None - mandel_strain: ufl.core.expr.Expr | df.fem.Function | None = None - stress: ufl.core.expr.Expr | df.fem.Function | None = None - strain: ufl.core.expr.Expr | df.fem.Function | None = None - degree_of_hydration: ufl.core.expr.Expr | df.fem.Function | None = None - damage: ufl.core.expr.Expr | df.fem.Function | None = None - compressive_strength: ufl.core.expr.Expr | df.fem.Function | None = None - tensile_strength: ufl.core.expr.Expr | df.fem.Function | None = None - youngs_modulus: ufl.core.expr.Expr | df.fem.Function | None = None - yield_values: ufl.core.expr.Expr | df.fem.Function | None = None - - -class MaterialProblem(ABC, LogMixin): - def __init__( - self, - experiment: Experiment, - parameters: dict[str, pint.Quantity], - pv_name: str = "pv_output_full", - pv_path: PosixPath | None = None, - ) -> None: - """Base material problem. - - Parameters - ---------- - experiment : object - parameters : dictionary, optional - Dictionary with parameters. 
When none is provided, default values are used - pv_name : string, optional - Name of the paraview file, if paraview output is generated - pv_path : string, optional - Name of the paraview path, if paraview output is generated - """ - - self.experiment = experiment - self.mesh = self.experiment.mesh - - # initialize parameter attributes - setup_parameters = Parameters() - # setting up default setup parameters defined in each child - _, default_p = self.default_parameters() - setup_parameters.update(default_p) - # update with experiment parameters - setup_parameters.update(self.experiment.parameters) - # update with input parameters - setup_parameters.update(parameters) - - # get logger info which input parameters are set to default values - # plus check dimensionality of input parameters - keys_set_default = [] - for key in dict(default_p): - if key not in parameters: - keys_set_default.append(key) - else: - # check if units are compatible - dim_given = parameters[key].dimensionality - dim_default = default_p[key].dimensionality - if dim_given != dim_default: - raise ValueError( - f"given units for {key} are not compatible with default units: {dim_given} != {dim_default}" - ) - self.logger.info(f"for the following parameters, the default values are used: {keys_set_default}") - - # set parameters as attribute - self.parameters = setup_parameters - # remove units for use in fem model - self.p = self.parameters.to_magnitude() - self.experiment.p = self.p # update experimental parameter list for use in e.g. boundary definition - - self.sensors = self.SensorDict() # list to hold attached sensors - - # setting up path for paraview output - if not pv_path: - pv_path = "." - self.pv_output_file = Path(pv_path) / (pv_name + ".xdmf") - - # setup fields for sensor output, can be defined in model - self.fields = None - self.q_fields = None - - self.residual = None # initialize residual - - # initialize time - self.time = 0.0 - - # set up xdmf file with mesh info - with df.io.XDMFFile(self.mesh.comm, self.pv_output_file, "w") as f: - f.write_mesh(self.mesh) - - # setup the material object to access the function - self.setup() - - @staticmethod - @abstractmethod - def default_parameters() -> tuple[Experiment, dict[str, pint.Quantity]]: - """returns a dictionary with required parameters and a set of working values as example""" - # this must de defined in each setup class - - pass - - @abstractmethod - def setup(self) -> None: - # initialization of this specific problem - """Implemented in child if needed""" - - @abstractmethod - def solve(self) -> None: - """Implemented in child if needed""" - self.update_time() - # define what to do, to solve this problem - - @abstractmethod - def compute_residuals(self) -> None: - # define what to do, to compute the residuals. Called in solve - """Implemented in child if needed""" - - def add_sensor(self, sensor: BaseSensor) -> None: - if isinstance(sensor, BaseSensor): - self.sensors[sensor.name] = sensor - else: - raise ValueError("The sensor must be of the class Sensor") - - def clean_sensor_data(self) -> None: - for sensor_object in self.sensors.values(): - sensor_object.data.clear() - - def delete_sensor(self) -> None: - del self.sensors - self.sensors = self.SensorDict() - - def update_time(self) -> None: - """update time""" - self.time += self.p["dt"] - - def export_sensors_metadata(self, path: Path) -> None: - """Exports sensor metadata to JSON file according to the appropriate schema. 
- - Args: - path : Path - Path where the metadata should be stored - - """ - - sensors_metadata_dict = {"sensors": []} - - for key, value in self.sensors.items(): - sensors_metadata_dict["sensors"].append(value.report_metadata()) - # sensors_metadata_dict[key]["name"] = key - - with open(path, "w") as f: - json.dump(sensors_metadata_dict, f) - - def import_sensors_from_metadata(self, path: Path) -> None: - """Import sensor metadata to JSON file and validate with the appropriate schema. - - Args: - path : Path - Path where the metadata file is - - """ - - # Load and validate - sensors_metadata_dict = {} - with open(path, "r") as f: - sensors_metadata_dict = json.load(f) - schema = generate_sensor_schema() - jsonschema.validate(instance=sensors_metadata_dict, schema=schema) - - for sensor in sensors_metadata_dict["sensors"]: - # Dynamically import the module containing the class - module_name = "fenicsxconcrete.sensor_definition." + sensor["sensor_file"].lower() - module = importlib.import_module(module_name) - - # Create a dictionary of keyword arguments from the remaining properties in the dictionary - kwargs = { - k: v for k, v in sensor.items() if k not in ["id", "type", "sensor_file", "units", "dimensionality"] - } - - # Dynamically retrieve the class by its name - class_name = sensor["type"] - MySensorClass = getattr(module, class_name) - - # Instantiate an object of the class with the given properties - sensor_i = MySensorClass(name=sensor["id"], **kwargs) - sensor_i.set_units(units=sensor["units"]) - - self.add_sensor(sensor_i) - - class SensorDict(dict): - """ - Dict that also allows to access the parameter p["parameter"] via the matching attribute p.parameter - to make access shorter - - When to sensors with the same name are defined, the next one gets a number added to the name - """ - - def __getattr__(self, key: str): # -> BaseSensor: - return self[key] - - def __setitem__(self, initial_key: str, value: BaseSensor) -> None: - # check if key exists, if so, add a number to the name - i = 2 - key = initial_key - if key in self: - while key in self: - key = initial_key + str(i) - i += 1 - # rename the sensor object - value.name = key - - super().__setitem__(key, value) - - def __deepcopy__(self, memo: dict) -> SensorDict: - return self.__class__({k: deepcopy(v, memo) for k, v in self.items()}) diff --git a/src/fenicsxconcrete/finite_element_problem/concrete_am.py b/src/fenicsxconcrete/finite_element_problem/concrete_am.py deleted file mode 100644 index 6c5a437..0000000 --- a/src/fenicsxconcrete/finite_element_problem/concrete_am.py +++ /dev/null @@ -1,507 +0,0 @@ -import copy -from collections.abc import Callable -from typing import Type - -import dolfinx as df -import numpy as np -import pint -import ufl -from mpi4py import MPI -from petsc4py import PETSc - -from fenicsxconcrete.experimental_setup import AmMultipleLayers, Experiment -from fenicsxconcrete.finite_element_problem.base_material import MaterialProblem, QuadratureFields, SolutionFields -from fenicsxconcrete.util import Parameters, QuadratureEvaluator, QuadratureRule, project, ureg - - -class ConcreteAM(MaterialProblem): - """A class for additive manufacturing models - - - including pseudo density approach for element activation -> set_initial_path == negative time when element will be activated - - time incremental weak form (in case of density load increments are computed automatic, otherwise user controlled) - - possible corresponding material laws - - [concretethixelasticmodel] linear elastic thixotropy = linear 
elastic with age dependent Young's modulus - - [concreteviscodevthixelasticmodel] thixotropy-viscoelastic model (Three parameter model: CMaxwell or CKelvin) with deviator assumption with age dependent moduli - - ... - - Attributes: - nonlinear_problem: the nonlinear problem class of used material law - further: see base class - """ - - def __init__( - self, - experiment: Experiment, - parameters: dict[str, pint.Quantity], - nonlinear_problem: Type[df.fem.petsc.NonlinearProblem] | None = None, - pv_name: str = "pv_output_full", - pv_path: str | None = None, - ) -> None: - """initialize object - - Args: - experiment: The experimental setup. - parameters: Dictionary with parameters. - nonlinear_problem: the nonlinear problem class of used material law - pv_name: Name of the paraview file, if paraview output is generated. - pv_path: Name of the paraview path, if paraview output is generated. - - """ - - # # adding default material parameter, will be overridden by outside input - # default_p = Parameters() - # # default stress state for 2D optional "plane_stress" - # - # # updating parameters, overriding defaults - # default_p.update(parameters) - - if nonlinear_problem: - self.nonlinear_problem = nonlinear_problem - else: - self.nonlinear_problem = ConcreteThixElasticModel # default material - - super().__init__(experiment, parameters, pv_name, pv_path) - - @staticmethod - def parameter_description() -> dict[str, str]: - """static method returning a description dictionary for required parameters - - Returns: - description dictionary - - """ - description = { - "general parameters": { - "rho": "density of fresh concrete", - "g": "gravity", - "nu": "Poissons Ratio", - "degree": "Polynomial degree for the FEM model", - "q_degree": "Polynomial degree for which the quadrature rule integrates correctly", - "load_time": "time in which the load is applied", - "stress_state": "for 2D plain stress or plane strain", - "dt": "time step", - }, - "ThixElasticModel": { - "E_0": "Youngs Modulus at age=0", - "R_E": "Reflocculation (first) rate", - "A_E": "Structuration (second) rate", - "tf_E": "Reflocculation time (switch point)", - "age_0": "Start age of concrete", - }, - } - - return description - - @staticmethod - def default_parameters( - non_linear_problem: df.fem.petsc.NonlinearProblem | None = None, - ) -> tuple[Experiment, dict[str, pint.Quantity]]: - """Static method that returns a set of default parameters for the selected nonlinear problem. - - Args: - non_linear_problem: the nonlinear problem class of used material law - - Returns: - The default experiment instance and the default parameters as a dictionary. 
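A hypothetical driver for the pairing returned by default_parameters above (experiment plus parameter dictionary); the step count and initial path value are illustrative assumptions:

from fenicsxconcrete.finite_element_problem.concrete_am import ConcreteAM

experiment, parameters = ConcreteAM.default_parameters()   # AmMultipleLayers + ThixElasticModel defaults
problem = ConcreteAM(experiment, parameters)               # ConcreteThixElasticModel is used by default
problem.set_initial_path(-5.0)                              # every element becomes active 5 s into the simulation
for _ in range(3):
    problem.solve()                                         # advances time by parameters["dt"] each call
    problem.pv_plot()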
- - """ - - # default experiment - experiment = AmMultipleLayers(AmMultipleLayers.default_parameters()) - - # default parameters according given nonlinear problem - joined_parameters = { - # Material parameter for concrete model with structural build-up - "rho": 2070 * ureg("kg/m^3"), # density of fresh concrete - "g": 9.81 * ureg("m/s^2"), # gravity - "nu": 0.3 * ureg(""), # Poissons Ratio - # other model parameters - "degree": 2 * ureg(""), # polynomial degree - "q_degree": 2 * ureg(""), # quadrature rule - "stress_state": "plane_strain" * ureg(""), # for 2D stress state - "dt": 1.0 * ureg("s"), # time step - "load_time": 60 * ureg("s"), # body force load applied in s - } - if not non_linear_problem or non_linear_problem == ConcreteThixElasticModel: - ### default parameters required for ThixElasticModel - model_parameters = { - # Youngs modulus is changing over age (see E_fkt) following the bilinear approach Kruger et al 2019 - # (https://www.sciencedirect.com/science/article/pii/S0950061819317507) with two different rates - "E_0": 15000 * ureg("Pa"), # Youngs Modulus at age=0 - "R_E": 15 * ureg("Pa/s"), # Reflocculation (first) rate - "A_E": 30 * ureg("Pa/s"), # Structuration (second) rate - "tf_E": 300 * ureg("s"), # Reflocculation time (switch point) - "age_0": 0 * ureg("s"), # start age of concrete - } - else: - raise ValueError("non_linear_problem not supported") - - return experiment, {**joined_parameters, **model_parameters} - - def setup(self) -> None: - """set up problem""" - - self.rule = QuadratureRule(cell_type=self.mesh.ufl_cell(), degree=self.p["q_degree"]) - # displacement space (name V required for sensors!) - self.V = df.fem.VectorFunctionSpace(self.mesh, ("CG", self.p["degree"])) - self.strain_stress_space = self.rule.create_quadrature_tensor_space(self.mesh, (self.p["dim"], self.p["dim"])) - - # global variables for all AM problems relevant - self.fields = SolutionFields(displacement=df.fem.Function(self.V, name="displacement")) - - self.q_fields = QuadratureFields( - measure=self.rule.dx, - plot_space_type=("DG", self.p["degree"] - 1), - strain=df.fem.Function(self.strain_stress_space, name="strain"), - stress=df.fem.Function(self.strain_stress_space, name="stress"), - ) - - # displacement increment - self.d_disp = df.fem.Function(self.V) - - # boundaries - bcs = self.experiment.create_displacement_boundary(self.V) - body_force_fct = self.experiment.create_body_force_am - - self.mechanics_problem = self.nonlinear_problem( - self.mesh, - self.p, - self.rule, - self.d_disp, - bcs, - body_force_fct, - ) - - # setting up the solver - self.mechanics_solver = df.nls.petsc.NewtonSolver(MPI.COMM_WORLD, self.mechanics_problem) - self.mechanics_solver.convergence_criterion = "incremental" - self.mechanics_solver.atol = 1e-9 - self.mechanics_solver.rtol = 1e-8 - self.mechanics_solver.report = True - - def solve(self) -> None: - """time incremental solving !""" - - self.update_time() # set t+dt - self.update_path() # set path - - self.logger.info(f"solve for t: {self.time}") - self.logger.info(f"CHECK if external loads are applied as incremental loads e.g. 
delta_u(t)!!!") - - # solve problem for current time increment - self.mechanics_solver.solve(self.d_disp) - - # update total displacement - self.fields.displacement.vector.array[:] += self.d_disp.vector.array[:] - self.fields.displacement.x.scatter_forward() - - # save fields to global problem for sensor output - self.q_fields.stress.vector.array[:] += self.mechanics_problem.q_sig.vector.array[:] - self.q_fields.stress.x.scatter_forward() - self.q_fields.strain.vector.array[:] += self.mechanics_problem.q_eps.vector.array[:] - self.q_fields.strain.x.scatter_forward() - - # additional output field not yet used in any sensors - self.youngsmodulus = self.mechanics_problem.q_E - - # get sensor data - self.compute_residuals() # for residual sensor - for sensor_name in self.sensors: - # go through all sensors and measure - self.sensors[sensor_name].measure(self) - - # update path & internal variables before next step! - self.mechanics_problem.update_history(fields=self.fields, q_fields=self.q_fields) # if required otherwise pass - - def compute_residuals(self) -> None: - """defines what to do, to compute the residuals. Called in solve for sensors""" - - self.residual = self.mechanics_problem.R - - def update_path(self) -> None: - """update path for next time increment""" - self.mechanics_problem.q_array_path += self.p["dt"] * np.ones_like(self.mechanics_problem.q_array_path) - - def set_initial_path(self, path: list[float] | float) -> None: - """set initial path for problem - - Args: - path: array describing the negative time when an element will be reached on quadrature space - if only one value is given, it is assumed that all elements are reached at the same time - - """ - if isinstance(path, float): - self.mechanics_problem.q_array_path = path * np.ones_like(self.mechanics_problem.q_array_path) - else: - self.mechanics_problem.q_array_path = path - - def pv_plot(self) -> None: - """creates paraview output at given time step""" - - self.logger.info(f"create pv plot for t: {self.time}") - - # write further fields - sigma_plot = project( - self.mechanics_problem.sigma(self.fields.displacement), - df.fem.TensorFunctionSpace(self.mesh, self.q_fields.plot_space_type), - self.rule.dx, - ) - - E_plot = project( - self.mechanics_problem.q_E, df.fem.FunctionSpace(self.mesh, self.q_fields.plot_space_type), self.rule.dx - ) - - E_plot.name = "Youngs_Modulus" - sigma_plot.name = "Stress" - - with df.io.XDMFFile(self.mesh.comm, self.pv_output_file, "a") as f: - f.write_function(self.fields.displacement, self.time) - f.write_function(sigma_plot, self.time) - f.write_function(E_plot, self.time) - - @staticmethod - def fd_fkt(pd: list[float], path_time: list[float], dt: float, load_time: float) -> list[float]: - """computes weighting fct for body force term in pde - - body force can be applied in several loading steps given by parameter ["load_time"] - load factor for each step = 1 / "load_time" * dt - can be used in all nonlinear problems - - Args: - pd: array of pseudo density values - path_time: array of process time values - dt: time step value - load_time: time when load is fully applied - - Returns: - array of incremental weigths for body force - """ - fd = np.zeros_like(pd) - - active_idx = np.where(pd > 0)[0] # only active elements - # select indices where path_time is smaller than load_time and bigger then zero [since usually we start the computation at dt so that also for further layers the laoding starts at local layer time +dt] - load_idx = np.where((path_time[active_idx] <= load_time) & 
(path_time[active_idx] > 0)) - for _ in load_idx: - fd[active_idx[load_idx]] = dt / load_time # linear ramp - - return fd - - @staticmethod - def pd_fkt(path_time: list[float]) -> list[float]: - """computes pseudo density array - - pseudo density: decides if layer is there (age >=0 active) or not (age < 0 nonactive!) - decision based on current path_time value - can be used in all nonlinear problems - - Args: - path_time: array of process time values at quadrature points - - Returns: - array of pseudo density - """ - - l_active = np.zeros_like(path_time) # 0: non-active - - activ_idx = np.where(path_time >= 0 - 1e-5)[0] - l_active[activ_idx] = 1.0 # active - - return l_active - - @staticmethod - def E_fkt(pd: float, path_time: float, parameters: dict) -> float: - """computes the Young's modulus at current quadrature point according to bilinear Kruger model - - Args: - pd: value of pseudo density [0 - non active or 1 - active] - path_time: process time value - parameters: parameter dict for bilinear model described by (P0,R_P,A_P,tf_P,age_0) - - Returns: - value of current Young's modulus - """ - # print(parameters["age_0"] + path_time) - if pd > 0: # element active, compute current Young's modulus - age = parameters["age_0"] + path_time # age concrete - if age < parameters["tf_P"]: - E = parameters["P0"] + parameters["R_P"] * age - elif age >= parameters["tf_P"]: - E = ( - parameters["P0"] - + parameters["R_P"] * parameters["tf_P"] - + parameters["A_P"] * (age - parameters["tf_P"]) - ) - else: - E = 1e-4 # non-active - - return E - - -class ConcreteThixElasticModel(df.fem.petsc.NonlinearProblem): - """linear elastic thixotropy concrete model - - linear elasticity law with age dependent Youngs modulus modelling the thixotropy - tensor format!! - - Args: - mesh : The mesh. - parameters : Dictionary of material parameters. - rule: The quadrature rule. - u: displacement fct - bc: array of Dirichlet boundaries - body_force: function of creating body force - - """ - - def __init__( - self, - mesh: df.mesh.Mesh, - parameters: dict[str, int | float | str | bool], - rule: QuadratureRule, - u: df.fem.Function, - bc: list[df.fem.DirichletBCMetaClass], - body_force_fct: Callable, - ): - - self.p = parameters - self.rule = rule - self.mesh = mesh - dim_to_stress_dim = {1: 1, 2: 4, 3: 9} # Tensor formulation! 
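A plain-Python check of the bilinear law coded in E_fkt above, evaluated with the default ThixElasticModel values (E_0 = 15000 Pa, R_E = 15 Pa/s, A_E = 30 Pa/s, tf_E = 300 s, age_0 = 0 s); the helper name is illustrative:

params = {"P0": 15000.0, "R_P": 15.0, "A_P": 30.0, "tf_P": 300.0, "age_0": 0.0}

def bilinear_E(path_time: float, p: dict) -> float:
    age = p["age_0"] + path_time
    if age < p["tf_P"]:
        return p["P0"] + p["R_P"] * age                                   # reflocculation branch
    return p["P0"] + p["R_P"] * p["tf_P"] + p["A_P"] * (age - p["tf_P"])  # structuration branch

assert bilinear_E(0.0, params) == 15000.0     # fresh material starts at E_0
assert bilinear_E(100.0, params) == 16500.0   # 15000 + 15 * 100
assert bilinear_E(400.0, params) == 22500.0   # 15000 + 15 * 300 + 30 * 100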
- self.stress_strain_dim = dim_to_stress_dim[self.p["dim"]] - - # generic quadrature function space - q_V = self.rule.create_quadrature_space(self.mesh) - q_VT = self.rule.create_quadrature_tensor_space(self.mesh, (self.p["dim"], self.p["dim"])) - - # quadrature functions (required in pde) - self.q_E = df.fem.Function(q_V, name="youngs_modulus") - self.q_fd = df.fem.Function(q_V, name="density_increment") - - # path variable from AM Problem - self.q_array_path = self.rule.create_quadrature_array(self.mesh, shape=1) - # pseudo density for element activation - self.q_array_pd = self.rule.create_quadrature_array(self.mesh, shape=1) - - self.q_sig = df.fem.Function(q_VT, name="stress") - self.q_eps = df.fem.Function(q_VT, name="strain") - - # standard space - self.V = u.function_space - - # Define variational problem - v = ufl.TestFunction(self.V) - - # build up form - # multiplication with activated elements / current Young's modulus - R_ufl = ufl.inner(self.sigma(u), self.epsilon(v)) * self.rule.dx - - # apply body force - body_force = body_force_fct(v, self.q_fd, self.rule) - if body_force: - R_ufl -= body_force - - # quadrature point part - self.R = R_ufl - - # derivative - # normal form - dR_ufl = ufl.derivative(R_ufl, u) - - # quadrature part - self.dR = dR_ufl - self.sigma_evaluator = QuadratureEvaluator(self.sigma(u), self.mesh, self.rule) - self.eps_evaluator = QuadratureEvaluator(self.epsilon(u), self.mesh, self.rule) - - super().__init__(self.R, u, bc, self.dR) - - def x_sigma(self, v: ufl.core.expr.Expr) -> ufl.core.expr.Expr: - """compute stresses for Young's modulus == 1 - - Args: - v: testfunction - - Returns: - ufl expression for sigma - """ - - x_mu = df.fem.Constant(self.mesh, 1.0 / (2.0 * (1.0 + self.p["nu"]))) - x_lambda = df.fem.Constant(self.mesh, 1.0 * self.p["nu"] / ((1.0 + self.p["nu"]) * (1.0 - 2.0 * self.p["nu"]))) - if self.p["dim"] == 2 and self.p["stress_state"] == "plane_stress": - # see https://comet-fenics.readthedocs.io/en/latest/demo/elasticity/2D_elasticity.py.html - x_lambda = df.fem.Constant(self.mesh, 2 * x_mu.value * x_lambda.value / (x_lambda.value + 2 * x_mu.value)) - - return 2.0 * x_mu * self.epsilon(v) + x_lambda * ufl.nabla_div(v) * ufl.Identity(self.p["dim"]) - - def sigma(self, v: ufl.core.expr.Expr) -> ufl.core.expr.Expr: - """computes stresses for real Young's modulus given as quadrature fct q_E - - Args: - v: testfunction - - Returns: - ufl expression for sigma - """ - - return self.q_E * self.x_sigma(v) - - def epsilon(self, v: ufl.core.expr.Expr) -> ufl.core.expr.Expr: - """computes strains - - Args: - v: testfunction - - Returns: - ufl expression for strain - """ - return ufl.sym(ufl.grad(v)) - - def form(self, x: PETSc.Vec) -> None: - """This function is called before the residual or Jacobian is - computed. We override it to calculate the values on the quadrature - functions. 
- Args: - x: The vector containing the latest solution - """ - self.evaluate_material() - super().form(x) - - def evaluate_material(self) -> None: - """evaluate material""" - - # compute current element activation using static function of ConcreteAM - self.q_array_pd = ConcreteAM.pd_fkt(self.q_array_path) - - # compute current Young's modulus - # vectorize the function for speed up - E_fkt_vectorized = np.vectorize(ConcreteAM.E_fkt) - E_array = E_fkt_vectorized( - self.q_array_pd, - self.q_array_path, - { - "P0": self.p["E_0"], - "R_P": self.p["R_E"], - "A_P": self.p["A_E"], - "tf_P": self.p["tf_E"], - "age_0": self.p["age_0"], - }, - ) - self.q_E.vector.array[:] = E_array - self.q_E.x.scatter_forward() - - # compute loading factors for density load using static function of ConcreteAM - fd_array = ConcreteAM.fd_fkt(self.q_array_pd, self.q_array_path, self.p["dt"], self.p["load_time"]) - self.q_fd.vector.array[:] = fd_array - self.q_fd.x.scatter_forward() - - # postprocessing - self.sigma_evaluator.evaluate(self.q_sig) - self.eps_evaluator.evaluate(self.q_eps) # -> globally in concreteAM not possible why? - - def update_history(self, fields: SolutionFields | None = None, q_fields: QuadratureFields | None = None) -> None: - """nothing here""" - - pass - - -# further nonlinear problem classes for different types of materials -# class ConcreteViscoDevThixElasticModel(df.fem.petsc.NonlinearProblem): diff --git a/src/fenicsxconcrete/finite_element_problem/concrete_thermo_mechanical.py b/src/fenicsxconcrete/finite_element_problem/concrete_thermo_mechanical.py deleted file mode 100644 index 86891e2..0000000 --- a/src/fenicsxconcrete/finite_element_problem/concrete_thermo_mechanical.py +++ /dev/null @@ -1,809 +0,0 @@ -from typing import Callable - -import dolfinx as df -import numpy as np -import pint -import scipy -import ufl -from petsc4py import PETSc - -from fenicsxconcrete.experimental_setup import Experiment, SimpleCube -from fenicsxconcrete.finite_element_problem import MaterialProblem, QuadratureFields, SolutionFields -from fenicsxconcrete.util import LogMixin, Parameters, QuadratureEvaluator, QuadratureRule, project, ureg - - -class ConcreteThermoMechanical(MaterialProblem, LogMixin): - """ - A class for a weakly coupled thermo-mechanical model, where the youngs modulus of the - concrete depends on the thermal problem. - - Args: - experiment: The experimental setup. - parameters: Dictionary with parameters. - pv_name: Name of the paraview file, if paraview output is generated. - pv_path: Name of the paraview path, if paraview output is generated. 
- """ - - def __init__( - self, - experiment: Experiment, - parameters: dict[str, pint.Quantity], - pv_name: str = "pv_output_full", - pv_path: str | None = None, - ) -> None: - - # # adding default material parameter, will be overridden by outside input - # default_p = Parameters() - # # default_p['dummy'] = 'example' * ureg('') # example default parameter for this class - # - # # updating parameters, overriding defaults - # default_p.update(parameters) - - super().__init__(experiment, parameters, pv_name, pv_path) - - @staticmethod - def parameter_description() -> dict[str, str]: - description = { - "igc": "Ideal gas constant", - "rho": "Density of concrete", - "g": "Gravitational acceleration", - "thermal_cond": "effective thermal conductivity", - "vol_heat_cap": "volumetric heat capacity", - "Q_pot": "potential heat per weight of binder", - "Q_inf": "potential heat per concrete volume", - "B1": "numerical shape parameter for heat release function", - "B2": "numerical shape parameter for heat release function", - "eta": "numerical shape parameter for heat release function, maybe something about diffusion", - "alpha_max": "maximum degree of hydration, must be <= 1", - "E_act": "activation energy per mol", - "T_ref": "reference temperature", - "degree": "Polynomial degree for the FEM model", - "q_degree": "Polynomial degree for which the quadrature rule integrates correctly", - "E_28": "Youngs Modulus of concrete", - "nu": "Poissons Ratio", - "alpha_t": "value of DOH where linear function changes to exponential, approx 0.1-0.2", - "alpha_0": "offset for Young's modulus as a function of DOH, to allow for a dormant phase", - "a_E": "exponential parameter to change the shape of the function E(DOH)", - "fc_inf": "reference value for the compressive strength, default infinity, otherwise at alpha_tx", - "a_fc": "exponential parameter to change the shape of the function fc(DOH)", - "ft_inf": "reference value for the tensile strength, default infinity, otherwise at alpha_tx", - "a_ft": "exponential parameter to change the shape of the function ft(DOH)", - "evolution_ft": "flag to turn off the evolution of the tensile strength", - "dt": "time step", - } - - return description - - @staticmethod - def default_parameters() -> tuple[Experiment, dict[str, pint.Quantity]]: - """ - Static method that returns a set of default parameters. - - Returns: - The default parameters as a dictionary. 
- """ - experiment = SimpleCube(SimpleCube.default_parameters()) - # Material parameter for concrete model with temperature and hydration - default_parameters = { - "igc": 8.3145 * ureg("J/K/mol"), - "rho": 2350.0 * ureg("kg/m^3"), - "g": 9.81 * ureg("m/s^2"), - "thermal_cond": 2.0 * ureg("W/(m*K)"), - "vol_heat_cap": 2.4e6 * ureg("J/(m^3 * K)"), - # "Q_pot": 500e3 * ureg("J/kg"), only needed for postprocessing - "Q_inf": 144000000 * ureg("J/m^3"), - "B1": 2.916e-4 * ureg("1/s"), - "B2": 0.0024229 * ureg("1/s"), - "eta": 5.554 * ureg(""), - "alpha_max": 0.875 * ureg(""), - "alpha_tx": 0.68 * ureg(""), - "T_ref": ureg.Quantity(25.0, ureg.degC), - "degree": 2 * ureg(""), - "q_degree": 2 * ureg(""), - "E_28": 15 * ureg("MPa"), - "nu": 0.2 * ureg(""), - "alpha_t": 0.2 * ureg(""), - "alpha_0": 0.05 * ureg(""), - "a_E": 0.6 * ureg(""), - "fc_inf": 6210000 * ureg(""), - "a_fc": 1.2 * ureg(""), - "ft_inf": 467000 * ureg(""), - "a_ft": 1.0 * ureg(""), - "evolution_ft": "True" * ureg(""), - "dt": 1.0 * ureg("s"), - } - default_parameters["E_act"] = 5653.0 * ureg("K") * default_parameters["igc"] - - return experiment, default_parameters - - def compute_residuals(self) -> None: - pass - - def setup(self) -> None: - self.t = 0.0 - - self.rule = QuadratureRule(cell_type=self.mesh.ufl_cell(), degree=self.p["q_degree"]) - displacement_space = df.fem.VectorFunctionSpace(self.experiment.mesh, ("P", self.p["degree"])) - temperature_space = df.fem.FunctionSpace(self.experiment.mesh, ("P", self.p["degree"])) - - self.fields = SolutionFields( - displacement=df.fem.Function(displacement_space, name="displacement"), - temperature=df.fem.Function(temperature_space, name="temperature"), - ) - - bcs_temperature = self.experiment.create_temperature_bcs(temperature_space) - # setting up the two nonlinear problems - self.temperature_problem = ConcreteTemperatureHydrationModel( - self.experiment.mesh, self.p, self.rule, self.fields.temperature, bcs_temperature - ) - - # here I "pass on the parameters from temperature to mechanics problem.." 
- bcs_mechanical = self.experiment.create_displacement_boundary(displacement_space) - body_forces = self.experiment.create_body_force(ufl.TestFunction(displacement_space)) - - self.mechanics_problem = ConcreteMechanicsModel( - self.experiment.mesh, - self.p, - self.rule, - self.fields.displacement, - bcs_mechanical, - body_forces, - ) - - # set q_fields now that the solvers are initialized - plot_space_type = ("DG", 0) if self.p["degree"] == 1 else ("CG", self.p["degree"] - 1) - self.q_fields = QuadratureFields( - measure=self.rule.dx, - plot_space_type=plot_space_type, - stress=self.mechanics_problem.sigma_voigt(self.mechanics_problem.sigma(self.fields.displacement)), - degree_of_hydration=self.temperature_problem.q_alpha, - youngs_modulus=self.mechanics_problem.q_E, - compressive_strength=self.mechanics_problem.q_fc, - tensile_strength=self.mechanics_problem.q_ft, - yield_values=self.mechanics_problem.q_yield, - ) - - # setting up the solvers - self.temperature_solver = df.nls.petsc.NewtonSolver(self.experiment.mesh.comm, self.temperature_problem) - self.temperature_solver.atol = 1e-8 - self.temperature_solver.rtol = 1e-8 - self.temperature_solver.max_it = 100 - self.temperature_solver.error_on_nonconvergence = False - - self.mechanics_solver = df.nls.petsc.NewtonSolver(self.experiment.mesh.comm, self.mechanics_problem) - self.mechanics_solver.atol = 1e-8 - self.mechanics_solver.rtol = 1e-8 - self.mechanics_solver.max_it = 5 - self.mechanics_solver.error_on_nonconvergence = False - - self.plot_space = df.fem.FunctionSpace(self.experiment.mesh, self.q_fields.plot_space_type) - self.plot_space_stress = df.fem.VectorFunctionSpace( - self.experiment.mesh, self.q_fields.plot_space_type, dim=self.mechanics_problem.stress_strain_dim - ) - - with df.io.XDMFFile(self.mesh.comm, self.pv_output_file, "w") as f: - f.write_mesh(self.mesh) - - def solve(self) -> None: - # from dolfinx import log - # log.set_log_level(log.LogLevel.INFO) - self.logger.info(f"Starting solve for temperature at time {self.time}") - n, converged = self.temperature_solver.solve(self.fields.temperature) - - if not converged: - raise Exception("Temperature solve did not converge") - else: - self.logger.info(f"Temperature solve converged in {n} iterations") - - # set current DOH for computation of Young's modulus - self.mechanics_problem.q_array_alpha[:] = self.temperature_problem.q_alpha.vector.array - - # mechanics paroblem is not required for temperature, could crash in frist time steps but then be useful - try: - n, converged = self.mechanics_solver.solve(self.fields.displacement) - if not converged: - self.logger.warning("Mechanics solve did not converge") - else: - self.logger.info(f"Mechanics solve converged in {n} iterations") - except RuntimeError as e: - print( - f"An error occured during the mechanics solve. This can happen in the first few solves. 
Error message {e}" - ) - - # history update - self.temperature_problem.update_history() - - # get sensor data - for sensor_name in self.sensors: - # go through all sensors and measure - self.sensors[sensor_name].measure(self) - - self.update_time() - - def pv_plot(self) -> None: - self.logger.info(f"Writing output to {self.pv_output_file}") - t = self.time - self._pv_plot_mechanics(t) - self._pv_plot_temperature(t) - - def _pv_plot_temperature(self, t) -> None: - with df.io.XDMFFile(self.mesh.comm, self.pv_output_file, "a") as f: - f.write_function(self.fields.temperature, t) - - alpha_plot = project(self.q_fields.degree_of_hydration, self.plot_space, self.q_fields.measure) - alpha_plot.name = "alpha" - f.write_function(alpha_plot, t) - - def _pv_plot_mechanics(self, t) -> None: - with df.io.XDMFFile(self.mesh.comm, self.pv_output_file, "a") as f: - f.write_function(self.fields.displacement, t) - - sigma_plot = project(self.q_fields.stress, self.plot_space_stress, self.rule.dx) - E_plot = project(self.q_fields.youngs_modulus, self.plot_space, self.rule.dx) - fc_plot = project(self.q_fields.compressive_strength, self.plot_space, self.rule.dx) - ft_plot = project(self.q_fields.tensile_strength, self.plot_space, self.rule.dx) - yield_plot = project(self.q_fields.yield_values, self.plot_space, self.rule.dx) - - E_plot.name = "Youngs_Modulus" - fc_plot.name = "Compressive_strength" - ft_plot.name = "Tensile_strength" - yield_plot.name = "Yield_surface" - sigma_plot.name = "Stress" - - f.write_function(sigma_plot, t) - f.write_function(E_plot, t) - f.write_function(fc_plot, t) - f.write_function(ft_plot, t) - f.write_function(yield_plot, t) - - def set_timestep(self, dt: float) -> None: - self.temperature_problem.set_timestep(dt) - - def get_heat_of_hydration_ftk(self) -> Callable: - return self.temperature_problem.heat_of_hydration_ftk - - def get_E_alpha_fkt(self) -> Callable: - return np.vectorize(self.mechanics_problem.E_fkt) - - def get_X_alpha_fkt(self) -> Callable: - return self.mechanics_problem.general_hydration_fkt - - -class ConcreteTemperatureHydrationModel(df.fem.petsc.NonlinearProblem, LogMixin): - def __init__( - self, - mesh: df.mesh.Mesh, - parameters: dict[str, int | float | str | bool], - rule: QuadratureRule, - temperature: df.fem.Function, - bcs: list[df.fem.DirichletBCMetaClass], - ) -> None: - self.mesh = mesh - self.p = parameters - self.rule = rule - self.T = temperature - self.bcs = bcs - # initialize timestep, musst be reset using .set_timestep(dt) - self.dt = parameters["dt"] - self.dt_form = df.fem.Constant(self.mesh, self.dt) - - # generic quadrature function space - q_V = self.rule.create_quadrature_space(self.mesh) - - # quadrature functions - self.q_alpha = df.fem.Function(q_V, name="degree_of_hydration") - self.q_delta_alpha = df.fem.Function(q_V, name="inrease_in_degree_of_hydration") - self.q_ddalpha_dT = df.fem.Function(q_V, name="derivative_of_delta_alpha_wrt_temperature") - - # quadrature arrays - self.q_array_T = self.rule.create_quadrature_array(self.mesh) - self.q_array_alpha_n = self.rule.create_quadrature_array(self.mesh) - # empfy list for newton iteration to compute delta alpha using the last value as starting point - self.q_array_delta_alpha_n = np.full(np.shape(self.q_array_T), 0.2) - # empfy list for newton iteration to compute delta alpha using the last value as starting point - self.delta_alpha_guess = [0.5, 1.0] - - # scalars for the analysis of the heat of hydration - self.alpha = 0 - self.delta_alpha = 0 - - # Define variational problem - 
self.T_n = df.fem.Function(self.T.function_space) - T_ = ufl.TrialFunction(self.T.function_space) - vT = ufl.TestFunction(self.T.function_space) - - # normal form - R_ufl = self.p["vol_heat_cap"] * self.T * vT * self.rule.dx - R_ufl += self.dt_form * ufl.dot(self.p["thermal_cond"] * ufl.grad(self.T), ufl.grad(vT)) * self.rule.dx - R_ufl += -self.p["vol_heat_cap"] * self.T_n * vT * self.rule.dx - # quadrature point part - - self.R = R_ufl - self.p["Q_inf"] * self.q_delta_alpha * vT * self.rule.dx - - # derivative - # normal form - dR_ufl = ufl.derivative(R_ufl, self.T) - # quadrature part - self.dR = dR_ufl - self.p["Q_inf"] * self.q_ddalpha_dT * T_ * vT * self.rule.dx - - # setup projector to project continuous funtionspace to quadrature - self.temperature_evaluator = QuadratureEvaluator(self.T, self.mesh, self.rule) - - self.T_n.x.array[:] = self.p["T_0"] - self.T.x.array[:] = self.p["T_0"] - - super().__init__(self.R, self.T, self.bcs, self.dR) - - def delta_alpha_fkt(self, delta_alpha: np.ndarray, alpha_n: np.ndarray, T: np.ndarray) -> np.ndarray: - return delta_alpha - self.dt * self.affinity(delta_alpha, alpha_n) * self.temp_adjust(T) - - def delta_alpha_prime(self, delta_alpha: np.ndarray, alpha_n: np.ndarray, T: np.ndarray) -> np.ndarray: - return 1 - self.dt * self.daffinity_ddalpha(delta_alpha, alpha_n) * self.temp_adjust(T) - - def heat_of_hydration_ftk( - self, T: np.ndarray, time_list: list[float], dt: float, parameter: dict - ) -> tuple[np.ndarray, np.ndarray]: - def interpolate(x, x_list, y_list): - # assuming ordered x list - - i = 0 - # check if x is in the dataset - if x > x_list[-1]: - print(" * Warning!!!: Extrapolation!!!") - point1 = (x_list[-2], y_list[-2]) - point2 = (x_list[-1], y_list[-1]) - elif x < x_list[0]: - print(" * Warning!!!: Extrapolation!!!") - point1 = (x_list[0], y_list[0]) - point2 = (x_list[1], y_list[1]) - else: - while x_list[i] < x: - i += 1 - point1 = (x_list[i - 1], y_list[i - 1]) - point2 = (x_list[i], y_list[i]) - - slope = (point2[1] - point1[1]) / (point2[0] - point1[0]) - x_increment = x - point1[0] - y_increment = slope * x_increment - y = point1[1] + y_increment - - return y - - # get tmax, identify number of time steps, then interpolate data - # assuming time list is ordered!!! 
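# Self-contained sketch of the local hydration update solved per quadrature
# point: find delta_alpha with delta_alpha - dt * A(alpha_n + delta_alpha) * k(T) = 0,
# using the affinity and Arrhenius factor of this model. B1, B2, eta, alpha_max,
# E_act and T_ref follow the defaults quoted above; alpha_n, T and dt are illustrative.
import numpy as np
from scipy import optimize

B1, B2, eta, alpha_max = 2.916e-4, 0.0024229, 5.554, 0.875
E_act, igc, T_ref = 47002.0, 8.3145, 298.15        # J/mol, J/(K*mol), K

def affinity(alpha):
    return B1 * (B2 / alpha_max + alpha) * (alpha_max - alpha) * np.exp(-eta * alpha / alpha_max)

def temp_adjust(T):
    return np.exp(-E_act / igc * (1.0 / T - 1.0 / T_ref))

def residual(delta_alpha, alpha_n, T, dt):
    return delta_alpha - dt * affinity(alpha_n + delta_alpha) * temp_adjust(T)

delta_alpha = optimize.newton(residual, x0=0.2, args=(0.1, 303.15, 600.0))
print(f"hydration increment over 10 min at 30 C: {delta_alpha:.3e}")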
- tmax = time_list[-1] - - # set paramters - self.p["B1"] = parameter["B1"] - self.p["B2"] = parameter["B2"] - self.p["eta"] = parameter["eta"] - self.p["alpha_max"] = parameter["alpha_max"] - self.p["E_act"] = parameter["E_act"] - self.p["T_ref"] = parameter["T_ref"] - self.p["Q_pot"] = parameter["Q_pot"] - - # set time step - self.dt = dt - - t = 0 - time = [0.0] - heat = [0.0] - alpha_list = [0.0] - alpha = 0 - delta_alpha_list = [0.0, 0.2, 0.5, 1.0] - delta_alpha_opt = -1.0 - error_flag = False - - while t < tmax: - for delta_alpha in delta_alpha_list: - delta_alpha_opt = scipy.optimize.newton( - self.delta_alpha_fkt, - args=(alpha, T), - fprime=self.delta_alpha_prime, - x0=delta_alpha, - ) - if delta_alpha_opt >= 0.0: - # success - break - if delta_alpha_opt < 0.0: - error_flag = True - - # update alpha - alpha = delta_alpha_opt + alpha - # save heat of hydration - alpha_list.append(alpha) - heat.append(alpha * self.p["Q_pot"]) - - # timeupdate - t = t + self.dt - time.append(t) - - # if there was a probem with the computation (bad input values), return zero - if error_flag: - heat_interpolated = np.zeros_like(time_list) - alpha_interpolated = np.zeros_like(time_list) - else: - # interpolate heat to match time_list - heat_interpolated = [] - alpha_interpolated = [] - for value in time_list: - heat_interpolated.append(interpolate(value, time, heat)) - alpha_interpolated.append(interpolate(value, time, alpha_list)) - - return np.asarray(heat_interpolated) / 1000, np.asarray(alpha_interpolated) - - def get_affinity(self) -> tuple[np.ndarray, np.ndarray]: - alpha_list = [] - affinity_list = [] - for val in range(1000): - alpha = val / 1000 - alpha_list.append(alpha) - affinity_list.append(self.affinity(alpha, 0)) - - return np.asarray(alpha_list), np.asarray(affinity_list) - - def evaluate_material(self) -> None: - - self.temperature_evaluator.evaluate(self.q_array_T) - - # solve for alpha at each quadrature point - # here the newton raphson method of the scipy package is used - # the zero value of the delta_alpha_fkt is found for each entry in alpha_n_list is found. the corresponding - # temparature is given in temperature_list and as starting point the value of last step used from delta_alpha_n - delta_alpha = scipy.optimize.newton( - self.delta_alpha_fkt, - args=(self.q_array_alpha_n, self.q_array_T), - fprime=self.delta_alpha_prime, - x0=self.q_array_delta_alpha_n, - ) - if np.any(delta_alpha < 0.0): - self.logger.info("Newton method failed, trying different starting points") - for guess in self.delta_alpha_guess: - delta_alpha = scipy.optimize.newton( - self.delta_alpha_fkt, - args=(self.q_array_alpha_n, self.q_array_T), - fprime=self.delta_alpha_prime, - x0=np.full_like(self.q_array_alpha_n, guess), - ) - if np.any(delta_alpha >= 0.0): - break - if np.any(delta_alpha < 0.0): - self.logger.error("HydrationModel: Newton Method failed with new starting points. Negative delta alpha.") - raise Exception( - "There is a problem with the alpha computation/initial guess, computed delta alpha is negative." 
- ) - - # save the delta alpha for next iteration as starting guess - self.q_array_delta_alpha_n = delta_alpha - - # compute current alpha - self.q_alpha.vector.array[:] = self.q_array_alpha_n + delta_alpha - # compute derivative of delta alpha with respect to temperature for rhs - self.q_ddalpha_dT.vector.array[:] = ( - self.dt - * self.affinity(self.q_alpha.vector.array, self.q_array_alpha_n) - * self.temp_adjust_tangent(self.q_array_T) - ) - - self.q_delta_alpha.vector.array[:] = delta_alpha - - def update_history(self) -> None: - self.T_n.x.array[:] = self.T.x.array # save temparature field - self.q_array_alpha_n[:] = self.q_alpha.vector.array # save alpha field - - def set_timestep(self, dt: float) -> None: - self.dt = dt - self.dt_form.value = dt - - def form(self, x: PETSc.Vec) -> None: - if self.dt <= 0: - raise RuntimeError("You need to `.set_timestep(dt)` larger than zero before the solve!") - - self.evaluate_material() - super().form(x) - - # needed for evaluation - def temp_adjust(self, T: np.ndarray) -> np.ndarray: - return np.exp(-self.p["E_act"] / self.p["igc"] * (1 / T - 1 / (self.p["T_ref"]))) - - # derivative of the temperature adjustment factor with respect to the temperature - - def temp_adjust_tangent(self, T: np.ndarray) -> np.ndarray: - return self.temp_adjust(T) * self.p["E_act"] / self.p["igc"] / T**2 - - # affinity function, needed for material_evaluation - def affinity(self, delta_alpha: np.ndarray, alpha_n: np.ndarray) -> np.ndarray: - affinity = ( - self.p["B1"] - * (self.p["B2"] / self.p["alpha_max"] + delta_alpha + alpha_n) - * (self.p["alpha_max"] - (delta_alpha + alpha_n)) - * np.exp(-self.p["eta"] * (delta_alpha + alpha_n) / self.p["alpha_max"]) - ) - return affinity - - # derivative of affinity with respect to delta alpha, needed for evaluation - def daffinity_ddalpha(self, delta_alpha: np.ndarray, alpha_n: np.ndarray) -> np.ndarray: - affinity_prime = ( - self.p["B1"] - * np.exp(-self.p["eta"] * (delta_alpha + alpha_n) / self.p["alpha_max"]) - * ( - (self.p["alpha_max"] - (delta_alpha + alpha_n)) - * (self.p["B2"] / self.p["alpha_max"] + (delta_alpha + alpha_n)) - * (-self.p["eta"] / self.p["alpha_max"]) - - self.p["B2"] / self.p["alpha_max"] - - 2 * (delta_alpha + alpha_n) - + self.p["alpha_max"] - ) - ) - return affinity_prime - - -class ConcreteMechanicsModel(df.fem.petsc.NonlinearProblem): - """ - Description of a concrete mechanics model - - Args: - mesh : The mesh. - parameters : Dictionary of material parameters. - rule: The quadrature rule. - pv_name: Name of the output file. 
- - """ - - def __init__( - self, - mesh: df.mesh.Mesh, - parameters: dict[str, int | float | str | bool], - rule: QuadratureRule, - u: df.fem.Function, - bcs: list[df.fem.DirichletBCMetaClass], - body_forces: ufl.form.Form | None, - ): - self.p = parameters - dim_to_stress_dim = {1: 1, 2: 3, 3: 6} - self.stress_strain_dim = dim_to_stress_dim[self.p["dim"]] - self.rule = rule - self.mesh = mesh - - # generic quadrature function space - q_V = self.rule.create_quadrature_space(mesh) - - # quadrature functions - self.q_E = df.fem.Function(q_V, name="youngs_modulus") - - self.q_fc = df.fem.Function(q_V) - self.q_ft = df.fem.Function(q_V) - self.q_yield = df.fem.Function(q_V) - self.q_array_alpha = self.rule.create_quadrature_array(self.mesh, shape=1) - self.q_array_sigma = self.rule.create_quadrature_array(self.mesh, shape=self.stress_strain_dim) - - # initialize degree of hydration to 1, in case machanics module is run without hydration coupling - self.q_array_alpha[:] = 1.0 - - # Define variational problem - v = ufl.TestFunction(u.function_space) - - # Elasticity parameters without multiplication with E - self.x_mu = 1.0 / (2.0 * (1.0 + self.p["nu"])) - self.x_lambda = 1.0 * self.p["nu"] / ((1.0 + self.p["nu"]) * (1.0 - 2.0 * self.p["nu"])) - - R_ufl = ufl.inner(self.sigma(u), self.eps(v)) * self.rule.dx - if body_forces is not None: - R_ufl += body_forces - - self.R = R_ufl - - # derivative - # normal form - dR_ufl = ufl.derivative(R_ufl, u) - # quadrature part - self.dR = dR_ufl - self.sigma_evaluator = QuadratureEvaluator(self.sigma_voigt(self.sigma(u)), self.mesh, self.rule) - super().__init__(self.R, u, bcs, self.dR) - - def _x_sigma(self, v: ufl.core.expr.Expr) -> ufl.core.expr.Expr: - eps = ufl.sym(ufl.grad(v)) - x_sigma = 2.0 * self.x_mu * eps + self.x_lambda * ufl.tr(eps) * ufl.Identity(len(v)) - return x_sigma - - def sigma(self, v: ufl.core.expr.Expr) -> ufl.core.expr.Expr: - return self.q_E * self._x_sigma(v) - - def eps(self, v: ufl.core.expr.Expr) -> ufl.core.expr.Expr: - return ufl.sym(ufl.grad(v)) - - def E_fkt(self, alpha: float, parameters: dict) -> float: - parameters["E_inf"] = ( - parameters["E"] - / ((parameters["alpha_tx"] - parameters["alpha_0"]) / (1 - parameters["alpha_0"])) ** parameters["a_E"] - ) - - if alpha < parameters["alpha_t"]: - E = ( - parameters["E_inf"] - * alpha - / parameters["alpha_t"] - * ((parameters["alpha_t"] - parameters["alpha_0"]) / (1 - parameters["alpha_0"])) ** parameters["a_E"] - ) - else: - E = ( - parameters["E_inf"] - * ((alpha - parameters["alpha_0"]) / (1 - parameters["alpha_0"])) ** parameters["a_E"] - ) - return E - - def general_hydration_fkt(self, alpha: np.ndarray, parameters: dict) -> np.ndarray: - return parameters["X_inf"] * alpha ** parameters["a_X"] - - def form(self, x: PETSc.Vec) -> None: - """This function is called before the residual or Jacobian is - computed. We override it to calculate the values on the quadrature - functions. 
- - Args: - x: The vector containing the latest solution - """ - self.evaluate_material() - super().form(x) - - def sigma_voigt(self, s: ufl.core.expr.Expr) -> ufl.core.expr.Expr: - # 1D option - if s.ufl_shape == (1, 1): - stress_vector = ufl.as_vector((s[0, 0])) - # 2D option - elif s.ufl_shape == (2, 2): - stress_vector = ufl.as_vector((s[0, 0], s[1, 1], s[0, 1])) - # 3D option - elif s.ufl_shape == (3, 3): - stress_vector = ufl.as_vector((s[0, 0], s[1, 1], s[2, 2], s[0, 1], s[1, 2], s[0, 2])) - else: - raise ("Problem with stress tensor shape for voigt notation") - return stress_vector - - def evaluate_material(self) -> None: - # convert quadrature spaces to numpy vector - # TODO: Why these new parameters? - parameters = {} - parameters["alpha_t"] = self.p["alpha_t"] - parameters["E"] = self.p["E_28"] - parameters["alpha_0"] = self.p["alpha_0"] - parameters["a_E"] = self.p["a_E"] - parameters["alpha_tx"] = self.p["alpha_tx"] - # vectorize the function for speed up - # TODO: remove vectorization. It does nothing for speed-up - E_fkt_vectorized = np.vectorize(self.E_fkt) - self.q_E.vector.array[:] = E_fkt_vectorized(self.q_array_alpha, parameters) - self.q_E.x.scatter_forward() - - # from here postprocessing - parameters = {} - parameters["X_inf"] = self.p["fc_inf"] - parameters["a_X"] = self.p["a_fc"] - - self.q_fc.vector.array[:] = self.general_hydration_fkt(self.q_array_alpha, parameters) - self.q_fc.x.scatter_forward() - - parameters = {} - parameters["X_inf"] = self.p["ft_inf"] - parameters["a_X"] = self.p["a_ft"] - - if self.p["evolution_ft"] == "True": - self.q_ft.vector.array[:] = self.general_hydration_fkt(self.q_array_alpha, parameters) - else: - # no evolution.... - self.q_ft.vector.array[:] = np.full_like(self.q_array_alpha, self.p["ft_inf"]) - self.q_ft.x.scatter_forward() - - self.sigma_evaluator.evaluate(self.q_array_sigma) - # print(self.q_E.vector.array.shape, self.q_array_sigma.shape) - # self.q_array_sigma *= self.q_E.vector.array - - self.q_yield.vector.array[:] = self.yield_surface( - self.q_array_sigma.reshape(-1, self.stress_strain_dim), self.q_ft.vector.array, self.q_fc.vector.array - ) - - def principal_stress(self, stresses: np.ndarray) -> np.ndarray: - # checking type of problem - n = stresses.shape[1] # number of stress components in stress vector - # finding eigenvalues of symmetric stress tensor - # 1D problem - if n == 1: - principal_stresses = stresses - # 2D problem - elif n == 3: - # the following uses - # lambda**2 - tr(sigma)lambda + det(sigma) = 0, solve for lambda using pq formula - p = -(stresses[:, 0] + stresses[:, 1]) - q = stresses[:, 0] * stresses[:, 1] - stresses[:, 2] ** 2 - - D = p**2 / 4 - q # help varibale - assert np.all(D >= -1.0e-15) # otherwise problem with imaginary numbers - sqrtD = np.sqrt(D) - - eigenvalues_1 = -p / 2.0 + sqrtD - eigenvalues_2 = -p / 2.0 - sqrtD - - # strack lists as array - principal_stresses = np.column_stack((eigenvalues_1, eigenvalues_2)) - - # principal_stress = np.array([ev1p,ev2p]) - elif n == 6: - principal_stresses = np.zeros([len(stresses), 3]) - # currently slow solution with loop over all stresses and subsequent numpy function call: - for i, stress in enumerate(stresses): - # convert voigt to tensor, (00,11,22,12,02,01) - stress_tensor = np.array( - [ - [stress[0], stress[5], stress[4]], - [stress[5], stress[1], stress[3]], - [stress[4], stress[3], stress[2]], - ] - ) - try: - # TODO: remove the sorting - principal_stress = np.linalg.eigvalsh(stress_tensor) - # sort principal stress from lagest to 
smallest!!! - principal_stresses[i] = np.flip(principal_stress) - except np.linalg.LinAlgError as e: - pass - - return principal_stresses - - def yield_surface(self, stresses: np.ndarray, ft: np.ndarray, fc: float) -> np.ndarray: - # TODO: it does not make sense anymore to include this postprocessing step in the material class - # I would suggest to create a sensor, that reads stress and outputs this yield value - # but I would maybe add that to future problems if you agree, otherwise leave it and ignore this - - # function for approximated yield surface - # first approximation, could be changed if we have numbers/information - fc2 = fc - # pass voigt notation and compute the principal stress - p_stresses = self.principal_stress(stresses) - - # get the principle tensile stresses - t_stresses = np.where(p_stresses < 0, 0, p_stresses) - - # get dimension of problem, ie. length of list with principal stresses - n = p_stresses.shape[1] - # check case - if n == 1: - # rankine for the tensile region - rk_yield_vals = t_stresses[:, 0] - ft[:] - - # invariants for drucker prager yield surface - I1 = stresses[:, 0] - I2 = np.zeros_like(I1) - # 2D problem - elif n == 2: - - # rankine for the tensile region - rk_yield_vals = (t_stresses[:, 0] ** 2 + t_stresses[:, 1] ** 2) ** 0.5 - ft[:] - - # invariants for drucker prager yield surface - I1 = stresses[:, 0] + stresses[:, 1] - I2 = ((stresses[:, 0] + stresses[:, 1]) ** 2 - ((stresses[:, 0]) ** 2 + (stresses[:, 1]) ** 2)) / 2 - - # 3D problem - elif n == 3: - # rankine for the tensile region - rk_yield_vals = (t_stresses[:, 0] ** 2 + t_stresses[:, 1] ** 2 + t_stresses[:, 2] ** 2) ** 0.5 - ft[:] - - # invariants for drucker prager yield surface - I1 = stresses[:, 0] + stresses[:, 1] + stresses[:, 2] - I2 = ( - (stresses[:, 0] + stresses[:, 1] + stresses[:, 2]) ** 2 - - ((stresses[:, 0]) ** 2 + (stresses[:, 1]) ** 2 + (stresses[:, 2]) ** 2) - ) / 2 - else: - raise ("Problem with input to yield surface, the array with stress values has the wrong size ") - - J2 = 1 / 3 * I1**2 - I2 - beta = (3.0**0.5) * (fc2 - fc) / (2 * fc2 - fc) - Hp = fc2 * fc / ((3.0**0.5) * (2 * fc2 - fc)) - - dp_yield_vals = beta / 3 * I1 + J2**0.5 - Hp - - # TODO: is this "correct", does this make sense? for a compression state, what if rk yield > dp yield??? 
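# Worked example of the yield check assembled below for a plane stress state in
# Voigt order [sxx, syy, sxy]; fc/ft correspond to the strength parameters at
# full hydration (fc_inf, ft_inf from the defaults), the stress is illustrative.
import numpy as np

fc, ft = 6.21e6, 4.67e5
fc2 = fc                                     # same first approximation as above
sxx, syy, sxy = -4.0e6, 0.0, 0.0             # 4 MPa uniaxial compression

# principal stresses via the pq-formula from principal_stress()
p, q = -(sxx + syy), sxx * syy - sxy**2
s1 = -p / 2 + np.sqrt(p**2 / 4 - q)
s2 = -p / 2 - np.sqrt(p**2 / 4 - q)

rk = np.hypot(max(s1, 0.0), max(s2, 0.0)) - ft           # Rankine, tensile branch
I1 = sxx + syy
I2 = ((sxx + syy) ** 2 - (sxx**2 + syy**2)) / 2
J2 = I1**2 / 3 - I2
beta = np.sqrt(3.0) * (fc2 - fc) / (2 * fc2 - fc)
Hp = fc2 * fc / (np.sqrt(3.0) * (2 * fc2 - fc))
dp = beta / 3 * I1 + np.sqrt(J2) - Hp                    # Drucker-Prager branch
print(max(rk, dp))                                       # negative -> still elastic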
- yield_vals = np.maximum(rk_yield_vals, dp_yield_vals) - - return yield_vals diff --git a/src/fenicsxconcrete/finite_element_problem/linear_elasticity.py b/src/fenicsxconcrete/finite_element_problem/linear_elasticity.py deleted file mode 100644 index e2c3fef..0000000 --- a/src/fenicsxconcrete/finite_element_problem/linear_elasticity.py +++ /dev/null @@ -1,157 +0,0 @@ -import dolfinx as df -import numpy as np -import pint -import ufl -from petsc4py.PETSc import ScalarType - -from fenicsxconcrete.experimental_setup import CantileverBeam, Experiment -from fenicsxconcrete.finite_element_problem.base_material import MaterialProblem, QuadratureFields, SolutionFields -from fenicsxconcrete.util import Parameters, ureg - - -class LinearElasticity(MaterialProblem): - """Material definition for linear elasticity""" - - def __init__( - self, - experiment: Experiment, - parameters: dict[str, pint.Quantity], - pv_name: str = "pv_output_full", - pv_path: str = None, - ) -> None: - """defines default parameters, for the rest, see base class""" - - # # adding default material parameter, will be overridden by outside input - # default_p = Parameters() - # default_p["stress_state"] = "plane_strain" * ureg("") # default stress state in 2D, optional "plane_stress" - # - # # updating parameters, overriding defaults - # default_p.update(parameters) - - super().__init__(experiment, parameters, pv_name, pv_path) - - def setup(self) -> None: - # compute different set of elastic moduli - - self.lambda_ = df.fem.Constant( - self.mesh, - self.p["E"] * self.p["nu"] / ((1 + self.p["nu"]) * (1 - 2 * self.p["nu"])), - ) - self.mu = df.fem.Constant(self.mesh, self.p["E"] / (2 * (1 + self.p["nu"]))) - if self.p["dim"] == 2 and self.p["stress_state"].lower() == "plane_stress": - self.lambda_ = df.fem.Constant( - self.mesh, 2.0 * self.mu.value * self.lambda_.value / (self.lambda_.value + 2 * self.mu.value) - ) - - # define function space ets. - self.V = df.fem.VectorFunctionSpace(self.mesh, ("Lagrange", self.p["degree"])) # 2 for quadratic elements - self.V_scalar = df.fem.FunctionSpace(self.mesh, ("Lagrange", self.p["degree"])) - - # Define variational problem - self.u_trial = ufl.TrialFunction(self.V) - self.v = ufl.TestFunction(self.V) - - self.fields = SolutionFields(displacement=df.fem.Function(self.V, name="displacement")) - self.q_fields = QuadratureFields( - measure=ufl.dx, - plot_space_type=("DG", self.p["degree"] - 1), - stress=self.sigma(self.fields.displacement), - strain=self.epsilon(self.fields.displacement), - ) - - # initialize L field, not sure if this is the best way... 
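# Quick check of the Lame constants computed in setup(), using the steel-like
# defaults of this class (E = 210 GPa, nu = 0.28) and the same plane-stress
# substitution for lambda.
E, nu = 210e9, 0.28
mu = E / (2 * (1 + nu))
lam = E * nu / ((1 + nu) * (1 - 2 * nu))
lam_plane_stress = 2 * mu * lam / (lam + 2 * mu)
print(f"mu = {mu:.3e} Pa, lambda = {lam:.3e} Pa, plane-stress lambda = {lam_plane_stress:.3e} Pa")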
- zero_field = df.fem.Constant(self.mesh, ScalarType(np.zeros(self.p["dim"]))) - self.L = ufl.dot(zero_field, self.v) * ufl.dx - - # apply external loads - external_force = self.experiment.create_force_boundary(self.v) - if external_force: - self.L = self.L + external_force - - body_force = self.experiment.create_body_force(self.v) - if body_force: - self.L = self.L + body_force - - # boundary conditions only after function space - bcs = self.experiment.create_displacement_boundary(self.V) - - self.a = ufl.inner(self.sigma(self.u_trial), self.epsilon(self.v)) * ufl.dx - self.weak_form_problem = df.fem.petsc.LinearProblem( - self.a, - self.L, - bcs=bcs, - u=self.fields.displacement, - petsc_options={"ksp_type": "preonly", "pc_type": "lu"}, - ) - - @staticmethod - def parameter_description() -> dict[str, str]: - """static method returning a description dictionary for required parameters - - Returns: - description dictionary - - """ - description = { - "g": "gravity", - "dt": "time step", - "rho": "density of fresh concrete", - "E": "Young's Modulus", - "nu": "Poissons Ratio", - "stress_state": "for 2D plain stress or plane strain", - "degree": "Polynomial degree for the FEM model", - "dt": "time step", - } - - return description - - @staticmethod - def default_parameters() -> tuple[Experiment, dict[str, pint.Quantity]]: - """returns a dictionary with required parameters and a set of working values as example""" - # default setup for this material - experiment = CantileverBeam(CantileverBeam.default_parameters()) - - model_parameters = {} - model_parameters["g"] = 9.81 * ureg("m/s^2") - model_parameters["dt"] = 1.0 * ureg("s") - - model_parameters["rho"] = 7750 * ureg("kg/m^3") - model_parameters["E"] = 210e9 * ureg("N/m^2") - model_parameters["nu"] = 0.28 * ureg("") - - model_parameters["stress_state"] = "plane_strain" * ureg("") - model_parameters["degree"] = 2 * ureg("") # polynomial degree - model_parameters["dt"] = 1.0 * ureg("s") - - return experiment, model_parameters - - # Stress computation for linear elastic problem - def epsilon(self, u: ufl.argument.Argument) -> ufl.tensoralgebra.Sym: - return ufl.tensoralgebra.Sym(ufl.grad(u)) - - def sigma(self, u: ufl.argument.Argument) -> ufl.core.expr.Expr: - return self.lambda_ * ufl.nabla_div(u) * ufl.Identity(self.p["dim"]) + 2 * self.mu * self.epsilon(u) - - def solve(self) -> None: - self.update_time() - self.logger.info(f"solving t={self.time}") - self.weak_form_problem.solve() - - # TODO Defined as abstractmethod. Should it depend on sensor instead of material? - self.compute_residuals() - - # get sensor data - for sensor_name in self.sensors: - # go through all sensors and measure - self.sensors[sensor_name].measure(self) - - def compute_residuals(self) -> None: - self.residual = ufl.action(self.a, self.fields.displacement) - self.L - - # paraview output - # TODO move this to sensor definition!?!?! 
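# 1-D spring-chain illustration of the residual trick behind compute_residuals()
# and the reaction-force sensors: after solving with one dof clamped, evaluating
# the full residual at that dof recovers the support force. All values illustrative.
import numpy as np

k, F = 1000.0, 10.0                                      # spring stiffness, tip load
main = k * np.array([1.0, 2.0, 2.0, 2.0, 1.0])           # 5 nodes, node 0 clamped
K = np.diag(main) + np.diag([-k] * 4, 1) + np.diag([-k] * 4, -1)
f = np.zeros(5); f[-1] = F
u = np.zeros(5)
u[1:] = np.linalg.solve(K[1:, 1:], f[1:])                # solve with dof 0 eliminated
print((K @ u - f)[0])                                    # residual at the clamp = -F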
- def pv_plot(self) -> None: - # Displacement Plot - - with df.io.XDMFFile(self.mesh.comm, self.pv_output_file, "a") as f: - f.write_function(self.fields.displacement, self.time) diff --git a/src/fenicsxconcrete/material.py b/src/fenicsxconcrete/material.py new file mode 100644 index 0000000..b8d530a --- /dev/null +++ b/src/fenicsxconcrete/material.py @@ -0,0 +1,19 @@ + +from typing import Callable +from pydantic.dataclasses import dataclass + +@dataclass(config=dict(arbitrary_types_allowed=True)) +class MaterialDefinition: + name: str + +@dataclass(config=dict(arbitrary_types_allowed=True)) +class LinearElasticMaterial(MaterialDefinition): + mu: float + lam: float + +@dataclass(config=dict(arbitrary_types_allowed=True)) +class MisesPlasticityIsotropicHardening(MaterialDefinition): + mu: float + lam: float + yield_stress: float + hardening_modulus: float | Callable[[float], float] \ No newline at end of file diff --git a/src/fenicsxconcrete/mesh.py b/src/fenicsxconcrete/mesh.py new file mode 100644 index 0000000..584e8f4 --- /dev/null +++ b/src/fenicsxconcrete/mesh.py @@ -0,0 +1,9 @@ +from pydantic.dataclasses import dataclass +@dataclass(config=dict(arbitrary_types_allowed=True)) +class MeshGenerator: + parameters: dict[str, tuple[float, str]] | None + cell_tags: dict[str, int] | None = None + facet_tags: dict[str, int] | None = None + + def generate(degree: int): + pass \ No newline at end of file diff --git a/src/fenicsxconcrete/names.py b/src/fenicsxconcrete/names.py new file mode 100644 index 0000000..6e82d9b --- /dev/null +++ b/src/fenicsxconcrete/names.py @@ -0,0 +1,26 @@ +from pydantic.dataclasses import dataclass +from enum import Enum +from typing import Literal + +FieldVaraibles = Literal["displacement", "temperature", "nonlocal_equivalent_strain"] + +QuadratureVariables = Literal["mandel_stress", "mandel_strain", "mandel_strain_rate"] + +@dataclass +class Quantity: + unit: str + +class BaseQuantity(Enum): + Displacement = Quantity("m") + Velocity = Quantity("m/s") + Acceleration = Quantity("m/s^2") + Temperature = Quantity("K") + Strain = Quantity("1") + Stress = Quantity("Pa") + Density = Quantity("kg/m^3") + Time = Quantity("s") + +class SolutionFields(Enum): + Displacement = Quantity("m") + Temperature = Quantity("K") + NonlocalEquivalentStrain = Quantity("1") diff --git a/src/fenicsxconcrete/problem.py b/src/fenicsxconcrete/problem.py new file mode 100644 index 0000000..67a4607 --- /dev/null +++ b/src/fenicsxconcrete/problem.py @@ -0,0 +1,73 @@ +from pydantic.dataclasses import dataclass + +from pydantic import RootModel + +from bcs import ( + BodyForceDefinition, + DirichletBCDefinition, + InitialConditionDefinition, + NeumannBCDefinition, +) +from material import LinearElasticMaterial +from mesh import MeshGenerator + +from sensor import PlotDefinition, PointSensorDefinition, GlobalSensorDefinition +from experiment import Experiment + +@dataclass(config=dict(arbitrary_types_allowed=True)) +class QuadratureRuleDefinition: + name: str + order: int + variable: str + +@dataclass(config=dict(arbitrary_types_allowed=True)) +class FiniteElementDefinition: + name: str + geometry_order: int + function_order: int + variable: str + +@dataclass(config=dict(arbitrary_types_allowed=True)) +class FEMProblemDefinition: + experiment: Experiment + sensors: list[PointSensorDefinition | GlobalSensorDefinition] + plots: list[PlotDefinition] + element_type: list[FiniteElementDefinition] + quadrature_rule: list[QuadratureRuleDefinition] + + +if __name__ == "__main__": + bc = 
DirichletBCDefinition(marker=1, value=[0.0, 0.0], subspace=0, variable="displacement") + neumann = NeumannBCDefinition(marker=2, value=[-42.0], variable="displacement") + initial = InitialConditionDefinition(value=[42.24], variable="density") + body_force = BodyForceDefinition(value=[0.0, 0.0, 9.81], variable="displacement") + mat = LinearElasticMaterial(name="steel", mu=1.0, lam=2.0) + geo = MeshGenerator(parameters={"length": (1, "m")}, mesh_tags={"left": 0, "right": 1, "top": 2, "bottom": 3}) + solution_fields = ["displacement"] + time = (0.0, 1.0) + exp = Experiment( + dirichlet_bcs=[bc], + neumann_bcs=[neumann], + initial_conditions=[initial], + body_forces=[body_force], + geometry=geo, + solution_fields=solution_fields, + time=time, + material=mat, + ) + + sensor = PointSensorDefinition(location=(0.0, 0.0, 0.0), variable="displacement", unit="m") + global_sensor = GlobalSensorDefinition(variable="energy", unit="J") + + plot = PlotDefinition(variable="displacement", unit="m") + + problem = FEMProblemDefinition( + experiment=exp, + sensors=[sensor, global_sensor], + plots=[plot], + element_type=[FiniteElementDefinition(name="P", geometry_order=1, function_order=1, variable="displacement")], + quadrature_rule=[QuadratureRuleDefinition(name="Gauss", order=2, variable="displacement")], + ) + + print(RootModel[Experiment](exp).model_dump_json(indent=4)) + print(RootModel[FEMProblemDefinition](problem).model_dump_json(indent=4, serialize_as_any=True)) diff --git a/src/fenicsxconcrete/sensor.py b/src/fenicsxconcrete/sensor.py new file mode 100644 index 0000000..2a06742 --- /dev/null +++ b/src/fenicsxconcrete/sensor.py @@ -0,0 +1,67 @@ + +from pydantic.dataclasses import dataclass +from typing import Callable +import numpy as np + + + + +@dataclass(config=dict(arbitrary_types_allowed=True)) +class PointSensorDefinition: + """ + Definition of a point sensor + + Args: + location: The location of the sensor + variable: The variable to be measured + + """ + location: tuple[float,float,float] + variable: str + unit: str + +@dataclass(config=dict(arbitrary_types_allowed=True)) +class GlobalSensorDefinition: + variable: str + unit: str + +@dataclass(config=dict(arbitrary_types_allowed=True)) +class PlotDefinition: + variable: str + #mapping: Callable + unit: str + +# @dataclass +# class DolfinXPointSensor: +# cells: list[int] +# plot_function: df.fem.Function | None +# function: df.fem.Function +# mapping: Callable +# definition: PointSensorDefinition + +# def __init__(self, sensor: SensorDefinition, function: df.fem.FunctionSpace, plot_function: df.fem.Function | None = None): +# pass + +@dataclass +class Sensors: + groups: dict[str, list[PointSensorDefinition | GlobalSensorDefinition | PlotDefinition]] + #plot_functions: dict[str, df.fem.Function | None] + #functions: dict[str, df.fem.Function] + + def measure(self, t: float): + for plot_function, function in zip(self.plot_functions.values(), self.functions.values()): + if plot_function is not None: + # projection here for all sensors that use the same variable + # FEniCS code comes here + pass + for group, sensors in self.groups.items(): + for sensor in sensors: + match sensor: + case PointSensorDefinition(location, variable, mapping, unit): + pass + case GlobalSensorDefinition(variable, mapping, unit): + pass + case PlotDefinition(variable, mapping, unit): + pass + case _: + pass \ No newline at end of file diff --git a/src/fenicsxconcrete/sensor_definition/README.md b/src/fenicsxconcrete/sensor_definition/README.md deleted file mode 100644 
index a46800e..0000000 --- a/src/fenicsxconcrete/sensor_definition/README.md +++ /dev/null @@ -1 +0,0 @@ -# files related to sensors \ No newline at end of file diff --git a/src/fenicsxconcrete/sensor_definition/__init__.py b/src/fenicsxconcrete/sensor_definition/__init__.py deleted file mode 100644 index 44ee4c0..0000000 --- a/src/fenicsxconcrete/sensor_definition/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from .base_sensor import BaseSensor, PointSensor -from .displacement_sensor import DisplacementSensor -from .doh_sensor import DOHSensor -from .reaction_force_sensor import ReactionForceSensor -from .sensor_schema import generate_sensor_schema -from .strain_sensor import StrainSensor -from .stress_sensor import StressSensor -from .temperature_sensor import TemperatureSensor -from .youngs_modulus_sensor import YoungsModulusSensor diff --git a/src/fenicsxconcrete/sensor_definition/base_sensor.py b/src/fenicsxconcrete/sensor_definition/base_sensor.py deleted file mode 100644 index 50684e9..0000000 --- a/src/fenicsxconcrete/sensor_definition/base_sensor.py +++ /dev/null @@ -1,165 +0,0 @@ -from __future__ import annotations - -import os -from abc import ABC, abstractmethod - -import pint - -from fenicsxconcrete.util import LogMixin, ureg - - -# sensor template -class BaseSensor(ABC, LogMixin): - """Template for a sensor object - - Attributes: - data: list of measured values - time: list of time stamps - units : pint definition of the base unit a sensor returns - name : name of the sensor, default is class name, but can be changed - """ - - def __init__(self, name: str | None = None) -> None: - """initializes the sensor - - Args: - name: optional argument to set a specific sensor name - """ - self.data = [] - self.time = [] - self.units = self.base_unit() - if name is None: - self.name = self.__class__.__name__ - else: - self.name = name - - # Can't use type hint here, because it would create a circular import - @abstractmethod - def measure(self, problem) -> None: - """Needs to be implemented in child, depends on the sensor - - This function is called, when the sensor adds the data to the data list. 
- """ - - @staticmethod - @abstractmethod - def base_unit() -> pint.Unit: - """Defines the base unit of this sensor""" - - def report_metadata(self) -> dict: - """Generates dictionary with the metadata of this sensor""" - metadata = {} - metadata["id"] = self.name - metadata["type"] = self.__class__.__name__ - metadata["sensor_file"] = os.path.splitext(os.path.basename(__file__))[0] - metadata["units"] = f"{self.units._units}" - metadata["dimensionality"] = f"{self.units.dimensionality}" - return metadata - - def get_data_list(self) -> pint.Quantity[list]: - """Returns the measured data with respective unit - - Returns: - measured data list with respective unit - """ - data = self.data * self.base_unit() # add base units - data.ito(self.units) # convert to target units - return data - - def get_time_list(self) -> pint.Quantity[list]: - """Returns the time data with respective unit - - Returns: - the time stamp list with the respective unit - """ - return self.time * ureg.second - - def get_data_at_time(self, t: float) -> pint.Quantity: - """Returns the measured data at a specific time - - Returns: - measured data at the specified time with the unit - - Raises: - ValueError: If there is no value t in time list - """ - try: - i = self.time.index(t) - except ValueError: # I want my own value error that is meaningful to the input - raise ValueError(f"There is no data measured at time {t}") - - data = self.data[i] * self.base_unit() # add base units - data.ito(self.units) # convert to target units - - return data - - def get_last_entry(self) -> pint.Quantity: - """Returns the measured data with respective unit - - Returns: - the measured data list with the respective unit - - Raises: - RuntimeError: If the data list is empty - """ - if len(self.data) > 0: - data = self.data[-1] * self.base_unit() # add base units - data.ito(self.units) # convert to target units - return data - else: - raise RuntimeError("There is no measured data to retrieve.") - - def set_units(self, units: str) -> None: - """sets the units which the sensor should return - - the unit must match the dimensionality of the base unit - - Args: - units: name of the units to convert to, must be defined in pint unit registry - """ - new_unit = ureg(units) - assert self.base_unit().dimensionality == new_unit.dimensionality - self.units = new_unit - - -class PointSensor(BaseSensor): - """ - Abstract class for a sensor that measures values at a specific point - - Attributes: - data: list of measured values - time: list of time stamps - units : pint definition of the base unit a sensor returns - name : name of the sensor, default is class name, but can be changed - where: location where the value is measured - """ - - def __init__(self, where: list[int | float], name: str | None = None) -> None: - """ - initializes a point sensor, for further details, see base class - - Arguments: - where : Point where to measure - name : name of the sensor - """ - super().__init__(name=name) - self.where = where - - @abstractmethod - def measure(self, problem) -> None: - """Needs to be implemented in child, depending on the sensor""" - - @staticmethod - @abstractmethod - def base_unit() -> pint.Unit: - """Defines the base unit of this sensor, must be specified by child""" - - def report_metadata(self) -> dict: - """Generates dictionary with the metadata of this sensor""" - metadata = super().report_metadata() - metadata["sensor_file"] = os.path.splitext(os.path.basename(__file__))[0] - if isinstance(self.where, list): - metadata["where"] = self.where - 
else: - metadata["where"] = list(self.where) - return metadata diff --git a/src/fenicsxconcrete/sensor_definition/displacement_sensor.py b/src/fenicsxconcrete/sensor_definition/displacement_sensor.py deleted file mode 100644 index aba4d36..0000000 --- a/src/fenicsxconcrete/sensor_definition/displacement_sensor.py +++ /dev/null @@ -1,69 +0,0 @@ -from __future__ import annotations - -import os -from typing import TYPE_CHECKING - -import dolfinx as df - -if TYPE_CHECKING: - from fenicsxconcrete.finite_element_problem.base_material import MaterialProblem - -from fenicsxconcrete.sensor_definition.base_sensor import PointSensor -from fenicsxconcrete.util import ureg - - -class DisplacementSensor(PointSensor): - """A sensor that measures displacement at a specific point - - Attributes: - data: list of measured values - time: list of time stamps - units : pint definition of the base unit a sensor returns - name : name of the sensor, default is class name, but can be changed - where: location where the value is measured - """ - - # Type hints don't work here because they create a circular import :( - def measure(self, problem: MaterialProblem) -> None: - """ - The displacement value at the defined point is added to the data list, - as well as the time t to the time list - - Arguments: - problem : FEM problem object - t : time of measurement for time dependent problems, default is 1 - """ - # get displacements - bb_tree = df.geometry.BoundingBoxTree(problem.experiment.mesh, problem.experiment.mesh.topology.dim) - cells = [] - - # Find cells whose bounding-box collide with the points - cell_candidates = df.geometry.compute_collisions(bb_tree, [self.where]) - - # Choose one of the cells that contains the point - colliding_cells = df.geometry.compute_colliding_cells(problem.experiment.mesh, cell_candidates, [self.where]) - - # for i, point in enumerate(self.where): - if len(colliding_cells.links(0)) > 0: - cells.append(colliding_cells.links(0)[0]) - - # adding correct units to displacement - displacement_data = problem.fields.displacement.eval([self.where], cells) - - self.data.append(displacement_data) - self.time.append(problem.time) - - def report_metadata(self) -> dict: - """Generates dictionary with the metadata of this sensor""" - metadata = super().report_metadata() - metadata["sensor_file"] = os.path.splitext(os.path.basename(__file__))[0] - return metadata - - @staticmethod - def base_unit() -> ureg: - """Defines the base unit of this sensor - - Returns: - the base unit as pint unit object - """ - return ureg.meter diff --git a/src/fenicsxconcrete/sensor_definition/doh_sensor.py b/src/fenicsxconcrete/sensor_definition/doh_sensor.py deleted file mode 100644 index cc10eb0..0000000 --- a/src/fenicsxconcrete/sensor_definition/doh_sensor.py +++ /dev/null @@ -1,81 +0,0 @@ -from __future__ import annotations - -import os -from typing import TYPE_CHECKING - -import dolfinx as df -import ufl - -if TYPE_CHECKING: - from fenicsxconcrete.finite_element_problem.base_material import MaterialProblem - -from fenicsxconcrete.sensor_definition.base_sensor import PointSensor -from fenicsxconcrete.util import project, ureg - - -class DOHSensor(PointSensor): - """A sensor that measures degree of hydration at a specific point - - Attributes: - data: list of measured values - time: list of time stamps - units : pint definition of the base unit a sensor returns - name : name of the sensor, default is class name, but can be changed - where: location where the value is measured - """ - - def measure(self, problem: 
MaterialProblem) -> None: - """ - The degree of hydration value at the defined point is added to the data list, - as well as the time t to the time list - - Arguments: - problem : FEM problem object - t : time of measurement for time dependent problems, default is 1 - """ - - try: - degree_of_hydration = problem.q_fields.degree_of_hydration - assert degree_of_hydration is not None - except AssertionError: - raise Exception("Strain not defined in problem") - - strain_function = project( - degree_of_hydration, # stress fct from problem - df.fem.FunctionSpace(problem.experiment.mesh, problem.q_fields.plot_space_type), # tensor space - problem.q_fields.measure, - ) - # project stress onto visualization space - - # finding the cells corresponding to the point - bb_tree = df.geometry.BoundingBoxTree(problem.experiment.mesh, problem.experiment.mesh.topology.dim) - cells = [] - - # Find cells whose bounding-box collide with the points - cell_candidates = df.geometry.compute_collisions(bb_tree, [self.where]) - - # Choose one of the cells that contains the point - colliding_cells = df.geometry.compute_colliding_cells(problem.experiment.mesh, cell_candidates, [self.where]) - if len(colliding_cells.links(0)) > 0: - cells.append(colliding_cells.links(0)[0]) - - # adding correct units to stress - strain_data = strain_function.eval([self.where], cells) - - self.data.append(strain_data) - self.time.append(problem.time) - - def report_metadata(self) -> dict: - """Generates dictionary with the metadata of this sensor""" - metadata = super().report_metadata() - metadata["sensor_file"] = os.path.splitext(os.path.basename(__file__))[0] - return metadata - - @staticmethod - def base_unit() -> ureg: - """Defines the base unit of this sensor - - Returns: - the base unit as pint unit object - """ - return ureg("") diff --git a/src/fenicsxconcrete/sensor_definition/reaction_force_sensor.py b/src/fenicsxconcrete/sensor_definition/reaction_force_sensor.py deleted file mode 100644 index 04cf7e6..0000000 --- a/src/fenicsxconcrete/sensor_definition/reaction_force_sensor.py +++ /dev/null @@ -1,123 +0,0 @@ -from __future__ import annotations - -import os -from typing import TYPE_CHECKING, TypedDict - -import dolfinx as df -import ufl - -if TYPE_CHECKING: - from fenicsxconcrete.finite_element_problem.base_material import MaterialProblem - -import dolfinx as df -import ufl - -from fenicsxconcrete.boundary_conditions.bcs import BoundaryConditions -from fenicsxconcrete.sensor_definition.base_sensor import BaseSensor -from fenicsxconcrete.util import ureg - - -class Surface(TypedDict): - """A typed dictionary class to define a surface - - Attributes: - function: name of the function to be called to ge the surface - args: additional arguments to be passed to such a function - """ - - function: str - args: dict - - -class ReactionForceSensor(BaseSensor): - """A sensor that measures the reaction force at a specified surface - - Attributes: - data: list of measured values - time: list of time stamps - units : pint definition of the base unit a sensor returns - name : name of the sensor, default is class name, but can be changed - surface : dictionary that defines the surface where the reaction force is measured - Args: - surface : a dictionary that defines the function for the reaction boundary, default is the bottom surface - name : name of the sensor, default is class name, but can be changed - """ - - def __init__(self, surface: Surface | None = None, name: str | None = None) -> None: - super().__init__(name=name) - 
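# Standalone sketch of the point-evaluation pattern the point sensors use
# (bounding-box search, then Function.eval). The geometry calls mirror the ones
# in the sensors above; mesh creation and the interpolated field are assumptions
# tied to the dolfinx version this code base targets.
import numpy as np
import dolfinx as df
from mpi4py import MPI

mesh = df.mesh.create_unit_square(MPI.COMM_WORLD, 4, 4)
V = df.fem.FunctionSpace(mesh, ("Lagrange", 1))
u = df.fem.Function(V)
u.interpolate(lambda x: x[0] + x[1])

point = np.array([0.25, 0.5, 0.0])
tree = df.geometry.BoundingBoxTree(mesh, mesh.topology.dim)
candidates = df.geometry.compute_collisions(tree, [point])
cells = df.geometry.compute_colliding_cells(mesh, candidates, [point])
if len(cells.links(0)) > 0:
    print(u.eval([point], [cells.links(0)[0]]))          # ~[0.75]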
self.surface_dict = surface - - def measure(self, problem: MaterialProblem) -> None: - """ - The reaction force vector of the defined surface is added to the data list, - as well as the time t to the time list - - Args: - problem : FEM problem object - t : time of measurement for time dependent problems, default is 1 - """ - # boundary condition - if self.surface_dict is None: - self.surface = problem.experiment.boundary_bottom() - else: - self.surface = getattr(problem.experiment, self.surface_dict["function"])(**self.surface_dict["args"]) - - v_reac = df.fem.Function(problem.fields.displacement.function_space) - - reaction_force_vector = [] - - bc_generator_x = BoundaryConditions(problem.mesh, problem.V) - bc_generator_x.add_dirichlet_bc( - value=df.fem.Constant(domain=problem.mesh, c=1.0), - boundary=self.surface, - sub=0, - method="geometrical", - entity_dim=problem.mesh.topology.dim - 1, - ) - df.fem.set_bc(v_reac.vector, bc_generator_x.bcs) - computed_force_x = -df.fem.assemble_scalar(df.fem.form(ufl.action(problem.residual, v_reac))) - reaction_force_vector.append(computed_force_x) - - bc_generator_y = BoundaryConditions(problem.mesh, problem.V) - bc_generator_y.add_dirichlet_bc( - value=df.fem.Constant(domain=problem.mesh, c=1.0), - boundary=self.surface, - sub=1, - method="geometrical", - entity_dim=problem.mesh.topology.dim - 1, - ) - df.fem.set_bc(v_reac.vector, bc_generator_y.bcs) - computed_force_y = -df.fem.assemble_scalar(df.fem.form(ufl.action(problem.residual, v_reac))) - reaction_force_vector.append(computed_force_y) - - if problem.p["dim"] == 3: - bc_generator_z = BoundaryConditions(problem.mesh, problem.V) - bc_generator_z.add_dirichlet_bc( - value=df.fem.Constant(domain=problem.mesh, c=1.0), - boundary=self.surface, - sub=2, - method="geometrical", - entity_dim=problem.mesh.topology.dim - 1, - ) - df.fem.set_bc(v_reac.vector, bc_generator_z.bcs) - computed_force_z = -df.fem.assemble_scalar(df.fem.form(ufl.action(problem.residual, v_reac))) - reaction_force_vector.append(computed_force_z) - - self.data.append(reaction_force_vector) - self.time.append(problem.time) - - def report_metadata(self) -> dict: - """Generates dictionary with the metadata of this sensor""" - metadata = super().report_metadata() - metadata["surface"] = self.surface_dict - metadata["sensor_file"] = os.path.splitext(os.path.basename(__file__))[0] - return metadata - - @staticmethod - def base_unit() -> ureg: - """Defines the base unit of this sensor - - Returns: - the base unit as pint unit object - """ - return ureg.newton diff --git a/src/fenicsxconcrete/sensor_definition/sensor_schema.py b/src/fenicsxconcrete/sensor_definition/sensor_schema.py deleted file mode 100644 index 86e8507..0000000 --- a/src/fenicsxconcrete/sensor_definition/sensor_schema.py +++ /dev/null @@ -1,158 +0,0 @@ -def generate_sensor_schema() -> dict: - """Function that returns the sensor schema. Necessary to include the schema in the package accessible. 
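`ReactionForceSensor.measure` above uses the virtual-work trick for reactions: fill a function with 1 on the constrained dofs of one component and assemble the residual acted on that function. A minimal sketch of the same idea on a scalar Poisson problem (my own illustrative setup, not the package's elasticity problem); the elasticity version simply repeats the last two steps once per displacement component (`sub=0, 1, 2`):

```python
# Hedged sketch of the reaction-force trick: -assemble(action(residual, v_reac))
# with v_reac = 1 on the constrained dofs. Placeholder Poisson problem.
import dolfinx as df
import numpy as np
import ufl
from mpi4py import MPI
from petsc4py.PETSc import ScalarType

mesh = df.mesh.create_unit_square(MPI.COMM_WORLD, 16, 16)
V = df.fem.FunctionSpace(mesh, ("Lagrange", 1))
u, v = ufl.TrialFunction(V), ufl.TestFunction(V)

f = df.fem.Constant(mesh, ScalarType(1.0))
a = ufl.inner(ufl.grad(u), ufl.grad(v)) * ufl.dx
L = f * v * ufl.dx

left_dofs = df.fem.locate_dofs_geometrical(V, lambda x: np.isclose(x[0], 0.0))
bc = df.fem.dirichletbc(ScalarType(0.0), left_dofs, V)

uh = df.fem.petsc.LinearProblem(a, L, bcs=[bc]).solve()

# residual of the solved problem, with the solution substituted for the trial function
residual = ufl.inner(ufl.grad(uh), ufl.grad(v)) * ufl.dx - f * v * ufl.dx

# "virtual displacement": 1 on the constrained dofs, 0 everywhere else
v_reac = df.fem.Function(V)
df.fem.set_bc(v_reac.vector, [df.fem.dirichletbc(ScalarType(1.0), left_dofs, V)])

# net reaction on the constrained edge
reaction = -df.fem.assemble_scalar(df.fem.form(ufl.action(residual, v_reac)))
```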
- - Returns: - Schema for sensor's list metadata - """ - - schema = { - "$schema": "http://json-schema.org/2020-12/schema#", - "title": "SensorsList", - "type": "object", - "properties": { - "sensors": { - "type": "array", - "items": { - "oneOf": [ - {"$ref": "#/definitions/BaseSensor"}, - {"$ref": "#/definitions/PointSensor"}, - {"$ref": "#/definitions/DisplacementSensor"}, - {"$ref": "#/definitions/TemperatureSensor"}, - {"$ref": "#/definitions/ReactionForceSensor"}, - {"$ref": "#/definitions/StrainSensor"}, - {"$ref": "#/definitions/StressSensor"}, - {"$ref": "#/definitions/YoungsModulusSensor"}, - {"$ref": "#/definitions/DOHSensor"}, - ] - }, - } - }, - "definitions": { - "baseSensorProperties": { - "type": "object", - "properties": { - "id": {"type": "string", "description": "A unique identifier for the sensor"}, - "type": {"type": "string", "description": "The python class for the sensor"}, - "units": {"type": "string", "description": "The unit of measurement for the sensor"}, - "dimensionality": { - "type": "string", - "description": "The dimensionality of measurement for the sensor between brackets []", - }, - "sensor_file": { - "type": "string", - "description": "Python file where the sensor is defined whithout extension", - }, - }, - "required": ["id", "type", "units", "dimensionality"], - }, - "pointSensorProperties": { - "allOf": [ - {"$ref": "#/definitions/baseSensorProperties"}, - { - "type": "object", - "properties": {"where": {"type": "array", "description": "Location of the sensor"}}, - "required": ["where"], - }, - ] - }, - "BaseSensor": { - "allOf": [ - {"$ref": "#/definitions/baseSensorProperties"}, - { - "type": "object", - "properties": {"type": {"const": "BaseSensor", "description": "The type of sensor"}}, - "required": ["type"], - }, - ] - }, - "PointSensor": { - "allOf": [ - {"$ref": "#/definitions/pointSensorProperties"}, - { - "type": "object", - "properties": {"type": {"const": "PointSensor", "description": "The type of sensor"}}, - "required": ["type"], - }, - ] - }, - "DisplacementSensor": { - "allOf": [ - {"$ref": "#/definitions/pointSensorProperties"}, - { - "type": "object", - "properties": {"type": {"const": "DisplacementSensor", "description": "The type of sensor"}}, - "required": ["type"], - }, - ] - }, - "TemperatureSensor": { - "allOf": [ - {"$ref": "#/definitions/pointSensorProperties"}, - { - "type": "object", - "properties": {"type": {"const": "TemperatureSensor", "description": "The type of sensor"}}, - "required": ["type"], - }, - ] - }, - "ReactionForceSensor": { - "allOf": [ - {"$ref": "#/definitions/baseSensorProperties"}, - { - "type": "object", - "properties": { - "type": {"const": "ReactionForceSensor", "description": "The type of sensor"}, - "surface": { - "type": "string", - "description": "Surface where the reactionforce is measured", - }, - }, - "required": ["type", "surface"], - }, - ] - }, - "StrainSensor": { - "allOf": [ - {"$ref": "#/definitions/pointSensorProperties"}, - { - "type": "object", - "properties": {"type": {"const": "StrainSensor", "description": "The type of sensor"}}, - "required": ["type"], - }, - ] - }, - "StressSensor": { - "allOf": [ - {"$ref": "#/definitions/pointSensorProperties"}, - { - "type": "object", - "properties": {"type": {"const": "StressSensor", "description": "The type of sensor"}}, - "required": ["type"], - }, - ] - }, - "YoungsModulusSensor": { - "allOf": [ - {"$ref": "#/definitions/pointSensorProperties"}, - { - "type": "object", - "properties": { - "type": {"const": "YoungsModulusSensor", 
"description": "The type of sensor"}, - }, - "required": ["type"], - }, - ] - }, - "DOHSensor": { - "allOf": [ - {"$ref": "#/definitions/pointSensorProperties"}, - { - "type": "object", - "properties": {"type": {"const": "DOHSensor", "description": "The type of sensor"}}, - "required": ["type"], - }, - ] - }, - }, - } - return schema diff --git a/src/fenicsxconcrete/sensor_definition/strain_sensor.py b/src/fenicsxconcrete/sensor_definition/strain_sensor.py deleted file mode 100644 index 26b4ad9..0000000 --- a/src/fenicsxconcrete/sensor_definition/strain_sensor.py +++ /dev/null @@ -1,81 +0,0 @@ -from __future__ import annotations - -import os -from typing import TYPE_CHECKING - -import dolfinx as df -import ufl - -if TYPE_CHECKING: - from fenicsxconcrete.finite_element_problem.base_material import MaterialProblem - -from fenicsxconcrete.sensor_definition.base_sensor import PointSensor -from fenicsxconcrete.util import project, ureg - - -class StrainSensor(PointSensor): - """A sensor that measures strain at a specific point - - Attributes: - data: list of measured values - time: list of time stamps - units : pint definition of the base unit a sensor returns - name : name of the sensor, default is class name, but can be changed - where: location where the value is measured - """ - - def measure(self, problem: MaterialProblem) -> None: - """ - The strain value at the defined point is added to the data list, - as well as the time t to the time list - - Arguments: - problem : FEM problem object - t : time of measurement for time dependent problems, default is 1 - """ - - try: - strain = problem.q_fields.strain - assert strain is not None - except AssertionError: - raise Exception("Strain not defined in problem") - - strain_function = project( - strain, # stress fct from problem - df.fem.TensorFunctionSpace(problem.experiment.mesh, problem.q_fields.plot_space_type), # tensor space - problem.q_fields.measure, - ) - # project stress onto visualization space - - # finding the cells corresponding to the point - bb_tree = df.geometry.BoundingBoxTree(problem.experiment.mesh, problem.experiment.mesh.topology.dim) - cells = [] - - # Find cells whose bounding-box collide with the points - cell_candidates = df.geometry.compute_collisions(bb_tree, [self.where]) - - # Choose one of the cells that contains the point - colliding_cells = df.geometry.compute_colliding_cells(problem.experiment.mesh, cell_candidates, [self.where]) - if len(colliding_cells.links(0)) > 0: - cells.append(colliding_cells.links(0)[0]) - - # adding correct units to stress - strain_data = strain_function.eval([self.where], cells) - - self.data.append(strain_data) - self.time.append(problem.time) - - def report_metadata(self) -> dict: - """Generates dictionary with the metadata of this sensor""" - metadata = super().report_metadata() - metadata["sensor_file"] = os.path.splitext(os.path.basename(__file__))[0] - return metadata - - @staticmethod - def base_unit() -> ureg: - """Defines the base unit of this sensor - - Returns: - the base unit as pint unit object - """ - return ureg("") diff --git a/src/fenicsxconcrete/sensor_definition/stress_sensor.py b/src/fenicsxconcrete/sensor_definition/stress_sensor.py deleted file mode 100644 index 305675f..0000000 --- a/src/fenicsxconcrete/sensor_definition/stress_sensor.py +++ /dev/null @@ -1,80 +0,0 @@ -from __future__ import annotations - -import os -from typing import TYPE_CHECKING - -import dolfinx as df -import ufl - -if TYPE_CHECKING: - from fenicsxconcrete.finite_element_problem.base_material 
import MaterialProblem - -from fenicsxconcrete.sensor_definition.base_sensor import PointSensor -from fenicsxconcrete.util import project, ureg - - -class StressSensor(PointSensor): - """A sensor that measures stress at a specific point - - Attributes: - data: list of measured values - time: list of time stamps - units : pint definition of the base unit a sensor returns - name : name of the sensor, default is class name, but can be changed - where: location where the value is measured - """ - - def measure(self, problem: MaterialProblem) -> None: - """ - The stress value at the defined point is added to the data list, - as well as the time t to the time list - - Arguments: - problem : FEM problem object - t : time of measurement for time dependent problems, default is 1 - """ - # project stress onto visualization space - try: - stress = problem.q_fields.stress - assert stress is not None - except AssertionError: - raise Exception("Stress not defined in problem") - - stress_function = project( - stress, # stress fct from problem - df.fem.TensorFunctionSpace(problem.experiment.mesh, problem.q_fields.plot_space_type), # tensor space - problem.q_fields.measure, - ) - - # finding the cells corresponding to the point - bb_tree = df.geometry.BoundingBoxTree(problem.experiment.mesh, problem.experiment.mesh.topology.dim) - cells = [] - - # Find cells whose bounding-box collide with the points - cell_candidates = df.geometry.compute_collisions(bb_tree, [self.where]) - - # Choose one of the cells that contains the point - colliding_cells = df.geometry.compute_colliding_cells(problem.experiment.mesh, cell_candidates, [self.where]) - if len(colliding_cells.links(0)) > 0: - cells.append(colliding_cells.links(0)[0]) - - # adding correct units to stress - stress_data = stress_function.eval([self.where], cells) - - self.data.append(stress_data) - self.time.append(problem.time) - - def report_metadata(self) -> dict: - """Generates dictionary with the metadata of this sensor""" - metadata = super().report_metadata() - metadata["sensor_file"] = os.path.splitext(os.path.basename(__file__))[0] - return metadata - - @staticmethod - def base_unit() -> ureg: - """Defines the base unit of this sensor - - Returns: - the base unit as pint unit object - """ - return ureg("N/m^2") diff --git a/src/fenicsxconcrete/sensor_definition/temperature_sensor.py b/src/fenicsxconcrete/sensor_definition/temperature_sensor.py deleted file mode 100644 index 05c8173..0000000 --- a/src/fenicsxconcrete/sensor_definition/temperature_sensor.py +++ /dev/null @@ -1,69 +0,0 @@ -from __future__ import annotations - -import os -from typing import TYPE_CHECKING - -import dolfinx as df - -if TYPE_CHECKING: - from fenicsxconcrete.finite_element_problem.base_material import MaterialProblem - -from fenicsxconcrete.sensor_definition.base_sensor import PointSensor -from fenicsxconcrete.util import ureg - - -class TemperatureSensor(PointSensor): - """A sensor that measures temperature at a specific point - - Attributes: - data: list of measured values - time: list of time stamps - units : pint definition of the base unit a sensor returns - name : name of the sensor, default is class name, but can be changed - where: location where the value is measured - """ - - # Type hints don't work here because they create a circular import :( - def measure(self, problem: MaterialProblem) -> None: - """ - The displacement value at the defined point is added to the data list, - as well as the time t to the time list - - Arguments: - problem : FEM problem object - 
t : time of measurement for time dependent problems, default is 1 - """ - # get displacements - bb_tree = df.geometry.BoundingBoxTree(problem.experiment.mesh, problem.experiment.mesh.topology.dim) - cells = [] - - # Find cells whose bounding-box collide with the points - cell_candidates = df.geometry.compute_collisions(bb_tree, [self.where]) - - # Choose one of the cells that contains the point - colliding_cells = df.geometry.compute_colliding_cells(problem.experiment.mesh, cell_candidates, [self.where]) - - # for i, point in enumerate(self.where): - if len(colliding_cells.links(0)) > 0: - cells.append(colliding_cells.links(0)[0]) - - # adding correct units to displacement - temperature_data = problem.fields.temperature.eval([self.where], cells) - - self.data.append(temperature_data) - self.time.append(problem.time) - - def report_metadata(self) -> dict: - """Generates dictionary with the metadata of this sensor""" - metadata = super().report_metadata() - metadata["sensor_file"] = os.path.splitext(os.path.basename(__file__))[0] - return metadata - - @staticmethod - def base_unit() -> ureg: - """Defines the base unit of this sensor - - Returns: - the base unit as pint unit object - """ - return ureg("K") diff --git a/src/fenicsxconcrete/sensor_definition/youngs_modulus_sensor.py b/src/fenicsxconcrete/sensor_definition/youngs_modulus_sensor.py deleted file mode 100644 index cd521df..0000000 --- a/src/fenicsxconcrete/sensor_definition/youngs_modulus_sensor.py +++ /dev/null @@ -1,81 +0,0 @@ -from __future__ import annotations - -import os -from typing import TYPE_CHECKING - -import dolfinx as df -import ufl - -if TYPE_CHECKING: - from fenicsxconcrete.finite_element_problem.base_material import MaterialProblem - -from fenicsxconcrete.sensor_definition.base_sensor import PointSensor -from fenicsxconcrete.util import project, ureg - - -class YoungsModulusSensor(PointSensor): - """A sensor that measures degree of hydration at a specific point - - Attributes: - data: list of measured values - time: list of time stamps - units : pint definition of the base unit a sensor returns - name : name of the sensor, default is class name, but can be changed - where: location where the value is measured - """ - - def measure(self, problem: MaterialProblem) -> None: - """ - The degree of hydration value at the defined point is added to the data list, - as well as the time t to the time list - - Arguments: - problem : FEM problem object - t : time of measurement for time dependent problems, default is 1 - """ - - try: - youngs_modulus = problem.q_fields.youngs_modulus - assert youngs_modulus is not None - except AssertionError: - raise Exception("Strain not defined in problem") - - strain_function = project( - youngs_modulus, # stress fct from problem - df.fem.FunctionSpace(problem.experiment.mesh, problem.q_fields.plot_space_type), # tensor space - problem.q_fields.measure, - ) - # project stress onto visualization space - - # finding the cells corresponding to the point - bb_tree = df.geometry.BoundingBoxTree(problem.experiment.mesh, problem.experiment.mesh.topology.dim) - cells = [] - - # Find cells whose bounding-box collide with the points - cell_candidates = df.geometry.compute_collisions(bb_tree, [self.where]) - - # Choose one of the cells that contains the point - colliding_cells = df.geometry.compute_colliding_cells(problem.experiment.mesh, cell_candidates, [self.where]) - if len(colliding_cells.links(0)) > 0: - cells.append(colliding_cells.links(0)[0]) - - # adding correct units to stress - strain_data 
= strain_function.eval([self.where], cells) - - self.data.append(strain_data) - self.time.append(problem.time) - - def report_metadata(self) -> dict: - """Generates dictionary with the metadata of this sensor""" - metadata = super().report_metadata() - metadata["sensor_file"] = os.path.splitext(os.path.basename(__file__))[0] - return metadata - - @staticmethod - def base_unit() -> ureg: - """Defines the base unit of this sensor - - Returns: - the base unit as pint unit object - """ - return ureg("N/m^2") diff --git a/src/fenicsxconcrete/util/__init__.py b/src/fenicsxconcrete/util/__init__.py deleted file mode 100644 index b48e8df..0000000 --- a/src/fenicsxconcrete/util/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from .logger import LogMixin -from .mori_tanaka_homogenization import ConcreteHomogenization -from .parameters import Parameters -from .projection import project -from .quadrature import QuadratureEvaluator, QuadratureRule -from .unit_registry import ureg diff --git a/src/fenicsxconcrete/util/logger.py b/src/fenicsxconcrete/util/logger.py deleted file mode 100644 index 9ca2315..0000000 --- a/src/fenicsxconcrete/util/logger.py +++ /dev/null @@ -1,8 +0,0 @@ -import logging - - -class LogMixin(object): - @property - def logger(self): - name = self.__class__.__module__ - return logging.getLogger(name) diff --git a/src/fenicsxconcrete/util/mori_tanaka_homogenization.py b/src/fenicsxconcrete/util/mori_tanaka_homogenization.py deleted file mode 100644 index 7d6309d..0000000 --- a/src/fenicsxconcrete/util/mori_tanaka_homogenization.py +++ /dev/null @@ -1,409 +0,0 @@ -import numpy as np -import pytest - - -def get_e_nu_from_k_g(K: float, G: float) -> tuple[float, float]: - """ - Computes Young's modulus and Poisson's ratio from bulk and shear modulus - - Args: - K: bulk modulus - G: shear modulus - - Returns: - Young's modulus and Poisson's ratio - """ - E = 9 * K * G / (3 * K + G) - nu = (3 * K - 2 * G) / (2 * (3 * K + G)) - return E, nu - - -def get_k_g_from_e_nu(E: float, nu: float) -> tuple[float, float]: - """ - Computes bulk and shear modulus from Young's modulus and Poisson's ratio - - Args: - E: Young's modulus - nu: Poisson's ratio - - Returns: - bulk and shear modulus - """ - K = E / (3 * (1 - 2 * nu)) - G = E / (2 * (1 + nu)) - return K, G - - -class ConcreteHomogenization: - """ - Class to compute homogenized parameters for cement matrix and aggregates - - Args: - - E_matrix: Young's modulus of matrix material - nu_matrix: Poisson's Ratio of matrix material - fc_matrix: Compressive strength of the matrix - rho_matrix: Density of the matrix - kappa_matrix: Thermal conductivity of the matrix - C_matrix: Specific heat capacity of the matrix in energy per weight - Q_matrix: Heat release in energy per weight of binder - """ - - def __init__( - self, - E_matrix: float, - nu_matrix: float, - fc_matrix: float, - rho_matrix: float = 1.0, - kappa_matrix: float = 1.0, - C_matrix: float = 1.0, - Q_matrix: float = 1.0, - ) -> None: - self.E_matrix = E_matrix - self.nu_matrix = nu_matrix - self.fc_matrix = fc_matrix - self.kappa_matrix = kappa_matrix - self.C_matrix = C_matrix - self.rho_matrix = rho_matrix - self.Q_matrix = Q_matrix - self.vol_frac_matrix = 1 - self.vol_frac_binder = 1 # when coated inclusions are considered these still count as binder volume - self.Q_vol_eff = self.Q_matrix * self.rho_matrix * self.vol_frac_binder - - self.K_matrix, self.G_matrix = get_k_g_from_e_nu(E_matrix, nu_matrix) - - # initial values - self.K_eff = self.K_matrix - self.G_eff = self.G_matrix - 
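All sensors above report data in the base unit returned by `base_unit()` (meter, kelvin, N/m², or dimensionless), and `fenicsxconcrete.util` exposes the shared pint registry `ureg`. A small sketch, with placeholder numbers, of converting such readings for reporting:

```python
# Placeholder numbers: attach each sensor's base unit with pint, then convert.
import numpy as np

from fenicsxconcrete.util import ureg

displacement = np.array([0.0012, 0.0018]) * ureg.meter  # stored in meters
print(displacement.to("mm"))                            # 1.2, 1.8 millimeter

temperature = 293.15 * ureg("K")                        # stored in kelvin
print(temperature.to("degC"))                           # 20 degree_Celsius

stress = 2.5e6 * ureg("N/m^2")                          # stored in N/m^2
print(stress.to("MPa"))                                 # 2.5 megapascal
```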
self.E_eff = E_matrix - self.nu_eff = nu_matrix - self.fc_eff = fc_matrix - self.kappa_eff = kappa_matrix - self.C_vol_eff = C_matrix * rho_matrix - self.rho_eff = rho_matrix - - # list for inclusion values (all phases that are not matrix) - self.n_incl = 0 - self.vol_frac_incl = [] - - self.A_dill_vol_incl = [] - self.A_dill_dev_incl = [] - self.G_incl = [] - self.K_incl = [] - self.A_therm_incl = [] # thermal conductivity - self.kappa_incl = [] - self.C_incl = [] - self.rho_incl = [] - - # auxiliary factors following Eshelby solution [Eshelby, 1957] - # required for uncoated computation - self.alpha_0 = (1 + nu_matrix) / (3 * (1 - nu_matrix)) - self.beta_0 = 2 * (4 - 5 * nu_matrix) / (15 * (1 - nu_matrix)) - - def add_uncoated_particle( - self, E: float, nu: float, volume_fraction: float, rho: float = 1.0, kappa: float = 1.0, C: float = 1.0 - ) -> None: - """ - Adds a phase of uncoated material - - the particles are assumed to be homogeneous and spherical - sets particle properties - setup function called - - Args: - E: Young's modulus of particle material - nu: Poisson's Ratio of particle material - volume_fraction: Volume fraction of the particle within the composite - rho: Density - kappa: Thermal conductivity - C: Specific heat capacity in energy per weight - - """ - K, G = get_k_g_from_e_nu(E, nu) - - A_dil_vol = self.K_matrix / (self.K_matrix + self.alpha_0 * (K - self.K_matrix)) - A_dil_dev = self.G_matrix / (self.G_matrix + self.beta_0 * (G - self.G_matrix)) - - # thermal concentration factor - A_therm = 3 * self.kappa_matrix / (2 * self.kappa_matrix + kappa) - - # update global fields - self.n_incl = self.n_incl + 1 - self.vol_frac_incl.append(volume_fraction) - self.A_dill_vol_incl.append(A_dil_vol) - self.A_dill_dev_incl.append(A_dil_dev) - self.G_incl.append(G) - self.K_incl.append(K) - self.vol_frac_matrix = self.vol_frac_matrix - volume_fraction - self.vol_frac_binder = self.vol_frac_binder - volume_fraction - self.A_therm_incl.append(A_therm) - self.kappa_incl.append(kappa) - self.rho_incl.append(rho) - self.C_incl.append(C) - - if self.vol_frac_matrix < 0: - raise Exception("Volume fraction of matrix cannot be smaller than zero!") - - self.update_effective_fields() - - def add_coated_particle( - self, - E_inclusion: float, - nu_inclusion: float, - itz_ratio: float, - radius: float, - coat_thickness: float, - volume_fraction: float, - rho: float = 1.0, - kappa: float = 1.0, - C: float = 1.0, - ): - """ - Adds a phase of coated material - - the particles are assumed to be homogeneous and spherical, coated by degraded matrix material - the computation is based on the formulation of Herve-Zaoui,????
and taken from the paper of - ......., - - sets partilce and coating properties - setup function called - - Args: - E_inclusion: Young's modulus of particle material - nu_inclusion: Poisson's Ratio of particle material - itz_ratio: Value of the reduction of the stiffness of the material surrounding the particles - radius: Radius of particles - coat_thickness: Thickness of the coating - volume_fraction: Volume fraction of the particle within the composite - rho: Density of the inclusion - k: Thermal conductivity of the particle, the coat is ignored - C: Specific heat capacity of the inclusion in energy per weight - """ - # set values - inclusion, coating, matrix - E = np.array([E_inclusion, self.E_matrix * itz_ratio, self.E_matrix]) - nu = np.array([nu_inclusion, self.nu_matrix, self.nu_matrix]) - - # list with radius for inclusion and coating - R = np.array([radius, radius + coat_thickness]) - - # compute volume fraction of itz - itz_vol_frac = (((radius + coat_thickness) / radius) ** 3 - 1) * volume_fraction - - # compute shear and bulk modulus - # G = E / (2 * (1 + nu)) # G, shear modulus for the different phases - # K = E / (3 * (1 - 2 * nu)) # K, bulk modulus for the different phases - K, G = get_k_g_from_e_nu(E, nu) - - # for the influence on the overall stiffness three auxiliary factor are computed for each of the two phases - # Q, A and B, to efficiently compute these many steps are computed first - - # initialize two axillary matrices N(2 x 2) and M(4 x 4) for coated particle, on for each of the two phases - N = [np.empty((2, 2)), np.empty((2, 2))] - M = [np.empty((4, 4)), np.empty((4, 4))] - - # loop over the two phases (inclusion, k=0 and coating, k=1) - self.x_list = [0, 0] - for k in range(2): - # more auxiliary variables a through f - a = G[k] / G[k + 1] * (7 + 5 * nu[k]) * (7 - 10 * nu[k + 1]) - (7 - 10 * nu[k]) * (7 + 5 * nu[k + 1]) - b = G[k] / G[k + 1] * (7 + 5 * nu[k]) + 4 * (7 - 10 * nu[k]) - c = (7 - 5 * nu[k + 1]) + 2 * (4 - 5 * nu[k + 1]) * G[k] / G[k + 1] - d = (7 + 5 * nu[k + 1]) + 4 * (7 - 10 * nu[k + 1]) * G[k] / G[k + 1] - e = 2 * (4 - 5 * nu[k]) + G[k] / G[k + 1] * (7 - 5 * nu[k]) - f = (4 - 5 * nu[k]) * (7 - 5 * nu[k + 1]) - G[k] / G[k + 1] * (4 - 5 * nu[k + 1]) * (7 - 5 * nu[k]) - alpha = G[k] / G[k + 1] - 1 - - self.x_list[k] = [G[k], G[k + 1], a, b, c, d, e, f, alpha] - - M[k][0][0] = c / 3 - M[k][0][1] = R[k] ** 2 * (3 * b - 7 * c) / (5 * (1 - 2 * nu[k])) - M[k][0][2] = -12 * alpha / (R[k] ** 5) - M[k][0][3] = 4 * (f - 27 * alpha) / (15 * R[k] ** 3 * (1 - 2 * nu[k])) - M[k][1][0] = 0 - M[k][1][1] = b * (1 - 2 * nu[k + 1]) / (7 * (1 - 2 * nu[k])) - M[k][1][2] = -20 * alpha * (1 - 2 * nu[k + 1]) / (7 * R[k] ** 7) - M[k][1][3] = -12 * alpha * (1 - 2 * nu[k + 1]) / (7 * R[k] ** 5 * (1 - 2 * nu[k])) - M[k][2][0] = R[k] ** 5 * alpha / 2 - M[k][2][1] = -R[k] ** 7 * (2 * a + 147 * alpha) / (70 * (1 - 2 * nu[k])) - M[k][2][2] = d / 7 - M[k][2][3] = ( - R[k] ** 2 - * (105 * (1 - nu[k + 1]) + 12 * alpha * (7 - 10 * nu[k + 1]) - 7 * e) - / (35 * (1 - 2 * nu[k])) - ) - M[k][3][0] = -5 * alpha * R[k] ** 3 * (1 - 2 * nu[k + 1]) / 6 - M[k][3][1] = 7 * alpha * R[k] ** 5 * (1 - 2 * nu[k + 1]) / (2 * (1 - 2 * nu[k])) - M[k][3][2] = 0 - M[k][3][3] = e * (1 - 2 * nu[k + 1]) / (3 * (1 - 2 * nu[k])) - # divide all by some factor - M[k] = M[k] / (5 * (1 - nu[k + 1])) - - N[k][0][0] = 3 * K[k] + 4 * G[k + 1] - N[k][0][1] = 4 / R[k] ** 3 * (G[k + 1] - G[k]) - N[k][1][0] = 3 * R[k] ** 3 * (K[k + 1] - K[k]) - N[k][1][1] = 3 * K[k + 1] + 4 * G[k] - - N[k] = N[k] / (3 * K[k + 1] + 4 * 
G[k + 1]) - - # initialize more auxiliary fields Q, P, W, A, B - Q = [[], []] - P = [[], []] - A = [[], []] - B = [[], []] - - Q[0] = N[0] - Q[1] = N[1].dot(N[0]) - - P[0] = M[0] - P[1] = M[1].dot(M[0]) - - W = ( - 1 - / (P[1][1][1] * P[1][0][0] - P[1][0][1] * P[1][1][0]) - * P[0].dot(np.array([[P[1][1][1]], [-P[1][1][0]], [0], [0]])) - ) - - A[0] = P[1][1][1] / (P[1][1][1] * P[1][0][0] - P[1][0][1] * P[1][1][0]) - A[1] = W[0][0] - - B[0] = -P[1][1][0] / (P[1][1][1] * P[1][0][0] - P[1][0][1] * P[1][1][0]) - B[1] = W[1][0] - - # finally the required dillition factors, volumetric and deviatoric are computed! - - A_dil_vol_incl = 1 / Q[1][0][0] - A_dil_vol_coat = Q[0][0][0] / Q[1][0][0] - A_dil_dev_incl = A[0] - 21 / 5 * R[0] ** 2 / (1 - 2 * nu[0]) * B[0] - A_dil_dev_coat = A[1] - 21 / 5 * (R[1] ** 5 - R[0] ** 5) / ((1 - 2 * nu[1]) * (R[1] ** 3 - R[0] ** 3)) * B[1] - - # thermal concentration factor - # coating is set to matrix material - A_therm = 3 * self.kappa_matrix / (2 * self.kappa_matrix + kappa) - - # update global fields - # inclusion data - self.vol_frac_incl.append(volume_fraction) - self.A_dill_vol_incl.append(A_dil_vol_incl) - self.A_dill_dev_incl.append(A_dil_dev_incl) - self.G_incl.append(G[0]) - self.K_incl.append(K[0]) - self.A_therm_incl.append(A_therm) - self.kappa_incl.append(kappa) - self.rho_incl.append(rho) - self.C_incl.append(C) - # coating data - self.vol_frac_incl.append(itz_vol_frac) - self.A_dill_vol_incl.append(A_dil_vol_coat) - self.A_dill_dev_incl.append(A_dil_dev_coat) - self.G_incl.append(G[1]) - self.K_incl.append(K[1]) - self.A_therm_incl.append(1) # coating is set to matrix material - self.kappa_incl.append(self.kappa_matrix) - self.rho_incl.append(self.rho_matrix) - self.C_incl.append(self.C_matrix) - # overall infos - self.n_incl = self.n_incl + 2 - - self.vol_frac_matrix = self.vol_frac_matrix - volume_fraction - itz_vol_frac - self.vol_frac_binder = self.vol_frac_binder - volume_fraction - - if self.vol_frac_matrix < 0: - raise Exception("Volume fraction of matrix can not be smaller than zero!") - - self.update_effective_fields() - - def update_effective_fields(self) -> None: - K_eff_numerator = self.vol_frac_matrix * self.K_matrix - K_eff_denominator = self.vol_frac_matrix - G_eff_numerator = self.vol_frac_matrix * self.G_matrix - G_eff_denominator = self.vol_frac_matrix - kappa_eff_numerator = self.vol_frac_matrix * self.kappa_matrix - kappa_eff_denominator = self.vol_frac_matrix - self.rho_eff = self.vol_frac_matrix * self.rho_matrix - self.C_vol_eff = self.vol_frac_matrix * self.C_matrix * self.rho_matrix - vol_test = self.vol_frac_matrix - - for i in range(self.n_incl): - K_eff_numerator += self.vol_frac_incl[i] * self.K_incl[i] * self.A_dill_vol_incl[i] - K_eff_denominator += self.vol_frac_incl[i] * self.A_dill_vol_incl[i] - G_eff_numerator += self.vol_frac_incl[i] * self.G_incl[i] * self.A_dill_dev_incl[i] - G_eff_denominator += self.vol_frac_incl[i] * self.A_dill_dev_incl[i] - kappa_eff_numerator += self.vol_frac_incl[i] * self.kappa_incl[i] * self.A_therm_incl[i] - kappa_eff_denominator += self.vol_frac_incl[i] * self.A_therm_incl[i] - self.rho_eff += self.vol_frac_incl[i] * self.rho_incl[i] - self.C_vol_eff += self.vol_frac_incl[i] * self.C_incl[i] * self.rho_incl[i] - vol_test += self.vol_frac_incl[i] - - assert vol_test == pytest.approx(1) # sanity check that vol fraction have been corretly computed - - # compute effective properties - self.K_eff = K_eff_numerator / K_eff_denominator - self.G_eff = G_eff_numerator / G_eff_denominator - 
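As a cross-check of the conversion helpers and the Eshelby-type factors used earlier in this file (not part of the original code): the K, G ↔ E, ν conversions round-trip, and the standard spherical-inclusion factors α₀ = (1+ν)/(3(1−ν)) and β₀ = 2(4−5ν)/(15(1−ν)) can equivalently be written in terms of K and G:

```python
# Numeric cross-check; placeholder material values.
import math

from fenicsxconcrete.util.mori_tanaka_homogenization import get_e_nu_from_k_g, get_k_g_from_e_nu

E, nu = 30e9, 0.2
K, G = get_k_g_from_e_nu(E, nu)
E2, nu2 = get_e_nu_from_k_g(K, G)
assert math.isclose(E, E2) and math.isclose(nu, nu2)

# standard Eshelby factors for a spherical inclusion, written two equivalent ways
alpha_0 = (1 + nu) / (3 * (1 - nu))
beta_0 = 2 * (4 - 5 * nu) / (15 * (1 - nu))
assert math.isclose(alpha_0, 3 * K / (3 * K + 4 * G))
assert math.isclose(beta_0, 6 * (K + 2 * G) / (5 * (3 * K + 4 * G)))
```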
self.E_eff = 9 * self.K_eff * self.G_eff / (3 * self.K_eff + self.G_eff) - self.nu_eff = (3 * self.K_eff - 2 * self.G_eff) / (2 * (3 * self.K_eff + self.G_eff)) - - self.kappa_eff = kappa_eff_numerator / kappa_eff_denominator - - # Mori-Tanaka factors for strength estimate - A_MT_K = 1 / K_eff_denominator - A_MT_G = 1 / G_eff_denominator - - ii = np.array( - [ - [1, 1, 1, 0, 0, 0], - [1, 1, 1, 0, 0, 0], - [1, 1, 1, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - ] - ) - - Iv = 1 / 3 * ii - Id = np.eye(6) - Iv - A_MT = A_MT_K * Iv + A_MT_G * Id # reversing volumetric and deviatoric split - - # compute material stiffness matrix L_eff (elastic) and matrix - def L_from_k_and_g(k, g): - ii = np.array( - [ - [1, 1, 1, 0, 0, 0], - [1, 1, 1, 0, 0, 0], - [1, 1, 1, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - ] - ) - - Iv = 1 / 3 * ii - Id = np.eye(6) - Iv - L = 3 * k * Iv + 2 * g * Id - - return L - - L_matrix = L_from_k_and_g(self.K_matrix, self.G_matrix) - L_eff = L_from_k_and_g(self.K_eff, self.G_eff) - - # todo continue with micro mech marble cement paper - - # calculation of compressive strength (J2 criterion for the matrix) - sigma_0 = np.zeros((3, 3)) - sigma_0[0][0] = self.fc_matrix - sigma_dev_0 = sigma_0 - 1 / 3 * np.trace(sigma_0) * np.eye(3) - j2_0 = np.sqrt(3 / 2 * sum(sum(np.multiply(sigma_dev_0, sigma_dev_0)))) - strength_test = 1e-5 # "test" stress to be scaled - sigma_test = np.array([[strength_test], [0], [0], [0], [0], [0]]) - - B_MT = np.dot(np.dot(L_matrix, A_MT), np.linalg.inv(L_eff)) - s0 = np.dot(B_MT, sigma_test) - stressInMatrix_tensor = np.array( - [[s0[0][0], s0[5][0], s0[4][0]], [s0[5][0], s0[1][0], s0[3][0]], [s0[4][0], s0[3][0], s0[2][0]]] - ) - sigma_dev_matrix = stressInMatrix_tensor - 1 / 3 * np.trace(stressInMatrix_tensor) * np.eye(3) - j2_matrix = np.sqrt(3 / 2 * sum(sum(np.multiply(sigma_dev_matrix, sigma_dev_matrix)))) - fc = j2_0 / j2_matrix * strength_test - self.fc_eff = fc - - # update heat release - self.Q_vol_eff = self.Q_matrix * self.rho_matrix * self.vol_frac_binder diff --git a/src/fenicsxconcrete/util/parameters.py b/src/fenicsxconcrete/util/parameters.py deleted file mode 100644 index 8b37eb2..0000000 --- a/src/fenicsxconcrete/util/parameters.py +++ /dev/null @@ -1,29 +0,0 @@ -from __future__ import annotations - -from collections import UserDict - -import pint - - -class Parameters(UserDict): - """ - A class that contains physical quantities for our model. Each new entry needs to be a pint quantity. 
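A hedged usage sketch of `ConcreteHomogenization` as defined above; the material values are placeholders, not validated concrete data:

```python
# Hedged usage sketch; all material values are placeholders.
from fenicsxconcrete.util import ConcreteHomogenization

hom = ConcreteHomogenization(
    E_matrix=25e9,      # Pa
    nu_matrix=0.2,
    fc_matrix=30e6,     # Pa
    rho_matrix=2400.0,  # kg/m^3
    kappa_matrix=1.5,   # W/(m K)
    C_matrix=900.0,     # J/(kg K)
    Q_matrix=250e3,     # J per kg of binder
)

# stiff spherical aggregate, 40 % by volume
hom.add_uncoated_particle(E=60e9, nu=0.25, volume_fraction=0.4, rho=2600.0, kappa=2.5, C=800.0)

print(hom.E_eff, hom.nu_eff, hom.fc_eff)                         # effective stiffness and strength
print(hom.kappa_eff, hom.C_vol_eff, hom.rho_eff, hom.Q_vol_eff)  # effective thermal data
```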
- """ - - def __setitem__(self, key: str, value: pint.Quantity): - assert isinstance(value, pint.Quantity) - self.data[key] = value.to_base_units() - - def __add__(self, other: Parameters | None) -> Parameters: - if other is None: - dic = self - else: - dic = Parameters({**self, **other}) - return dic - - def to_magnitude(self) -> dict[str, int | str | float]: - magnitude_dictionary = {} - for key in self.keys(): - magnitude_dictionary[key] = self[key].magnitude - - return magnitude_dictionary diff --git a/src/fenicsxconcrete/util/projection.py b/src/fenicsxconcrete/util/projection.py deleted file mode 100644 index fed7338..0000000 --- a/src/fenicsxconcrete/util/projection.py +++ /dev/null @@ -1,32 +0,0 @@ -import dolfinx as df -import ufl - - -def project( - v: df.fem.Function | ufl.core.expr.Expr, V: df.fem.FunctionSpace, dx: ufl.Measure, u: df.fem.Function | None = None -) -> None | df.fem.Function: - """ - Calculates an approximation of `v` on the space `V` - - Args: - v: The expression that we want to evaluate. - V: The function space on which we want to evaluate. - dx: The measure that is used for the integration. This is important, if - either `V` is a quadrature space or `v` is a ufl expression containing a quadrature space. - u: The output function. - - Returns: - A function if `u` is None, otherwise `None`. - - """ - dv = ufl.TrialFunction(V) - v_ = ufl.TestFunction(V) - a_proj = ufl.inner(dv, v_) * dx - b_proj = ufl.inner(v, v_) * dx - if u is None: - solver = df.fem.petsc.LinearProblem(a_proj, b_proj) - uh = solver.solve() - return uh - else: - solver = df.fem.petsc.LinearProblem(a_proj, b_proj, u=u) - solver.solve() diff --git a/src/fenicsxconcrete/util/quadrature.py b/src/fenicsxconcrete/util/quadrature.py deleted file mode 100644 index 5421a7c..0000000 --- a/src/fenicsxconcrete/util/quadrature.py +++ /dev/null @@ -1,185 +0,0 @@ -import basix -import dolfinx as df -import numpy as np -import ufl - - -class QuadratureRule: - """ - An object that takes care of the creation of a quadrature rule and the creation of - quadrature spaces. - - Args: - type: The quadrature type. Examples are `basix.QuadratureType.Default` - for Gaussian quadrature and `basix.QuadratureType.gll` for Gauss-Lobatto quadrature. - cell_type: The type of FEM cell (`triangle, tetrahedron`,...). - degree: The maximal degree that the quadrature rule should be able to integrate. - - - Attributes: - type (basix.QuadratureType): The quadrature type. - cell_type (ufl.Cell): The type of FEM cell. - degree (int): The quadrature degree. - points (np.ndarray): The quadrature points on the refernce cell. - weights (np.ndarray): The weights of the quadrature rule. - dx (ufl.measure): The appropriate measure for integrating ufl forms - with the specified quadrature rule. **Always** use this measure - when integrating a form that includes a quadrature function. - - """ - - def __init__( - self, - type: basix.QuadratureType = basix.QuadratureType.Default, - cell_type: ufl.Cell = ufl.triangle, - degree: int = 1, - ): - self.type = type - self.cell_type = cell_type - self.degree = degree - basix_cell = _ufl_cell_type_to_basix(self.cell_type) - self.points, self.weights = basix.make_quadrature(self.type, basix_cell, self.degree) - self.dx = ufl.dx( - metadata={ - "quadrature_rule": self.type.name, - "quadrature_degree": self.degree, - } - ) - - def create_quadrature_space(self, mesh: df.mesh.Mesh) -> df.fem.FunctionSpace: - """ - Args: - mesh: The mesh on which we want to create the space. 
- - Returns: - A scalar quadrature `FunctionSpace` on `mesh`. - """ - assert mesh.ufl_cell() == self.cell_type - Qe = ufl.FiniteElement( - "Quadrature", - self.cell_type, - self.degree, - quad_scheme=self.type.name, - ) - - return df.fem.FunctionSpace(mesh, Qe) - - def create_quadrature_vector_space(self, mesh: df.mesh.Mesh, dim: int) -> df.fem.VectorFunctionSpace: - """ - Args: - mesh: The mesh on which we want to create the space. - dim: The dimension of the vector at each dof. - - Returns: - A vector valued quadrature `FunctionSpace` on `mesh`. - """ - assert mesh.ufl_cell() == self.cell_type - Qe = ufl.VectorElement( - "Quadrature", - self.cell_type, - self.degree, - quad_scheme=self.type.name, - dim=dim, - ) - - return df.fem.FunctionSpace(mesh, Qe) - - def create_quadrature_tensor_space(self, mesh: df.mesh.Mesh, shape: tuple[int, int]) -> df.fem.TensorFunctionSpace: - """ - Args: - mesh: The mesh on which we want to create the space. - shape: The shape of the tensor at each dof. - - Returns: - A tensor valued quadrature `FunctionSpace` on `mesh`. - """ - assert mesh.ufl_cell() == self.cell_type - Qe = ufl.TensorElement( - "Quadrature", - self.cell_type, - self.degree, - quad_scheme=self.type.name, - shape=shape, - ) - - return df.fem.FunctionSpace(mesh, Qe) - - def number_of_points(self, mesh: df.mesh.Mesh) -> int: - """ - Args: - mesh: A mesh. - Returns: - Number of quadrature points that the QuadratureRule would generate on `mesh` - """ - assert mesh.ufl_cell() == self.cell_type - - map_c = mesh.topology.index_map(mesh.topology.dim) - self.num_cells = map_c.size_local - return self.num_cells * self.weights.size - - def create_quadrature_array(self, mesh: df.mesh.Mesh, shape: int | tuple[int, int] = 1) -> np.ndarray: - """ - Creates array of a quadrature function without creating the function or the function space. - This should be used, if operations on quadrature points are needed, but not all values are needed - in a ufl form. - - Args: - mesh: A mesh. - shape: Local shape of the quadrature space. Example: `shape = 1` for Scalar, - `shape = (n, 1)` for vector and `shape = (n,n)` for Tensor. - Returns: - An array that is equivalent to `quadrature_function.vector.array`. - """ - n_points = self.number_of_points(mesh) - n_local = shape if isinstance(shape, int) else shape[0] * shape[1] - return np.zeros(n_points * n_local) - - -def _ufl_cell_type_to_basix(cell_type: ufl.Cell) -> basix.CellType: - conversion = { - ufl.interval: basix.CellType.interval, - ufl.triangle: basix.CellType.triangle, - ufl.tetrahedron: basix.CellType.tetrahedron, - ufl.quadrilateral: basix.CellType.quadrilateral, - ufl.hexahedron: basix.CellType.hexahedron, - } - return conversion[cell_type] - - -class QuadratureEvaluator: - """ - A class that evaluates a ufl expression on a quadrature space. - - Args: - ufl_expression: The ufl expression. - mesh: The mesh on which we want to evaluate `ufl_expression` - rule: The quadrature rule. - """ - - def __init__(self, ufl_expression: ufl.core.expr.Expr, mesh: df.mesh.Mesh, rule: QuadratureRule) -> None: - assert mesh.ufl_cell() == rule.cell_type - map_c = mesh.topology.index_map(mesh.topology.dim) - self.num_cells = map_c.size_local - - self.cells = np.arange(0, self.num_cells, dtype=np.int32) - - self.expr = df.fem.Expression(ufl_expression, rule.points) - - def evaluate(self, q: np.ndarray | df.fem.Function | None = None) -> np.ndarray | None: - """ - Evaluate the expression. - - Args: - q: The object we want to write the result to. 
- - Returns: - A numpy array with the values if `q` is `None`, otherwise the result is written - on `q` and `None` is returned. - """ - if q is None: - return self.expr.eval(self.cells) - elif isinstance(q, np.ndarray): - self.expr.eval(self.cells, values=q.reshape(self.num_cells, -1)) - elif isinstance(q, df.fem.Function): - self.expr.eval(self.cells, values=q.vector.array.reshape(self.num_cells, -1)) - q.x.scatter_forward() diff --git a/src/fenicsxconcrete/util/unit_registry.py b/src/fenicsxconcrete/util/unit_registry.py deleted file mode 100644 index bf5c5e6..0000000 --- a/src/fenicsxconcrete/util/unit_registry.py +++ /dev/null @@ -1,17 +0,0 @@ -""" -This module contains the default unit registry that is used throughout the codebase. - - -Attributes: - ureg (pint.UnitRegistry): The default unit registry. - -""" - -import pint - -ureg = pint.UnitRegistry(cache_folder=":auto:") # initialize unit registry - -# user defined dimensions -ureg.define("[moment] = [force] * [length]") -ureg.define("[stress] = [force] / [length]**2") -ureg.define("GWP = [global_warming_potential] = kg_CO2_eq = kg_C02_equivalent = kg_C02eq") diff --git a/tests/README.md b/tests/README.md deleted file mode 100644 index 8725577..0000000 --- a/tests/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Testfiles - -Please make sure to keep the same structure as in the package \ No newline at end of file diff --git a/tests/boundary_conditions/test_bcs_get_boundary_dofs.py b/tests/boundary_conditions/test_bcs_get_boundary_dofs.py deleted file mode 100644 index 92c4428..0000000 --- a/tests/boundary_conditions/test_bcs_get_boundary_dofs.py +++ /dev/null @@ -1,92 +0,0 @@ -"""Based on Philipp Diercks implementation for multi""" - -import dolfinx -import numpy as np -from mpi4py import MPI - -from fenicsxconcrete.boundary_conditions.bcs import BoundaryConditions, get_boundary_dofs -from fenicsxconcrete.boundary_conditions.boundary import plane_at - - -def num_square_boundary_dofs(n: int, deg: int, dim: int, num_edges: int = 4) -> int: - """returns number of dofs for a square - assumes quadrilateral cells and structured grid - - there are (n+1) * num_edges - num_edges points - and if degree == 2: additional n*num_edges dofs (edges) - thus n * num_edges * degree dofs for degree in (1, 2) - times number of components (i.e. dim) - """ - return num_edges * n * deg * dim - - -def num_square_dofs(ncells: int, deg: int, dim: int) -> int: - if deg == 1: - n = ncells + 1 - elif deg == 2: - n = 2 * ncells + 1 - return n**2 * dim - - -def test_whole_boundary() -> None: - """test for bcs on ∂Ω - - compare options - (a) usage of BoundaryConditions - (b) helper function `get_boundary_dofs` - - The difference is that in (a) it is done topologically, - whereas in (b) the input is *any* callable (geometrical marker). 
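A hedged sketch of the quadrature utilities above: build a `QuadratureRule`, create a matching quadrature function, fill it with a `QuadratureEvaluator`, and integrate it using `rule.dx`, the measure that must accompany any form containing a quadrature function; the integrand is a placeholder:

```python
# Hedged sketch of QuadratureRule / QuadratureEvaluator; placeholder integrand.
import dolfinx as df
import ufl
from mpi4py import MPI

from fenicsxconcrete.util import QuadratureEvaluator, QuadratureRule

mesh = df.mesh.create_unit_square(MPI.COMM_WORLD, 4, 4)  # triangle cells, matching the rule
rule = QuadratureRule(cell_type=ufl.triangle, degree=2)

q = df.fem.Function(rule.create_quadrature_space(mesh))

x = ufl.SpatialCoordinate(mesh)
QuadratureEvaluator(x[0] * x[1], mesh, rule).evaluate(q)  # fill q at the quadrature points

# rule.dx carries the quadrature metadata, so the assembled form sees q's values
total = df.fem.assemble_scalar(df.fem.form(q * rule.dx))
```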
- """ - - n = 8 - degree = 2 - dim = 2 - - domain = dolfinx.mesh.create_unit_square(MPI.COMM_WORLD, n, n, dolfinx.mesh.CellType.quadrilateral) - V = dolfinx.fem.VectorFunctionSpace(domain, ("Lagrange", degree), dim=dim) - - # option (a) - bc_handler = BoundaryConditions(domain, V) - boundary_facets = dolfinx.mesh.exterior_facet_indices(domain.topology) - u = dolfinx.fem.Function(V) - u.x.set(0.0) - bc_handler.add_dirichlet_bc(u, boundary_facets, method="topological", entity_dim=1) - bcs = bc_handler.bcs - dofs = bcs[0].dof_indices()[0] - assert dofs.size == num_square_boundary_dofs(n, degree, dim) - - def everywhere(x): - return np.full(x[0].shape, True, dtype=bool) - - # option (b) - dofs = get_boundary_dofs(V, everywhere) - assert dofs.size == num_square_boundary_dofs(n, degree, dim) - - -def test_xy_plane() -> None: - n = 4 - degree = 2 - dim = 3 - - domain = dolfinx.mesh.create_unit_cube(MPI.COMM_WORLD, n, n, n, dolfinx.mesh.CellType.hexahedron) - V = dolfinx.fem.VectorFunctionSpace(domain, ("Lagrange", degree), dim=dim) - xy_plane = plane_at(0.0, "z") - - # option (a) - bc_handler = BoundaryConditions(domain, V) - u = dolfinx.fem.Function(V) - u.x.set(0.0) - bc_handler.add_dirichlet_bc(u, xy_plane, method="geometrical") - bcs = bc_handler.bcs - dofs = bcs[0].dof_indices()[0] - assert dofs.size == num_square_dofs(n, degree, dim) - - # option (b) - dofs = get_boundary_dofs(V, xy_plane) - assert dofs.size == num_square_dofs(n, degree, dim) - - -if __name__ == "__main__": - test_whole_boundary() - test_xy_plane() diff --git a/tests/boundary_conditions/test_boundary_create_facet_tags.py b/tests/boundary_conditions/test_boundary_create_facet_tags.py deleted file mode 100644 index fb1fa9e..0000000 --- a/tests/boundary_conditions/test_boundary_create_facet_tags.py +++ /dev/null @@ -1,22 +0,0 @@ -import dolfinx -from mpi4py import MPI - -from fenicsxconcrete.boundary_conditions.boundary import create_facet_tags, plane_at - - -def test_create_facet_tags() -> None: - domain = dolfinx.mesh.create_unit_square(MPI.COMM_WORLD, 8, 8, dolfinx.mesh.CellType.triangle) - to_be_marked = {"bottom": (4, plane_at(0.0, "y")), "right": (5, plane_at(1.0, "x"))} - ft, marked = create_facet_tags(domain, to_be_marked) - ft_bottom = ft.find(4) - ft_right = ft.find(5) - assert ft_bottom.size == 8 - assert ft_right.size == 8 - assert "bottom" in marked.keys() - assert "right" in marked.keys() - assert 4 in marked.values() - assert 5 in marked.values() - - -if __name__ == "__main__": - test_create_facet_tags() diff --git a/tests/boundary_conditions/test_boundary_line_at.py b/tests/boundary_conditions/test_boundary_line_at.py deleted file mode 100644 index 4235fe3..0000000 --- a/tests/boundary_conditions/test_boundary_line_at.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Based on Philipp Diercks implementation for multi""" - -import dolfinx -import numpy as np -from mpi4py import MPI -from petsc4py.PETSc import ScalarType - -from fenicsxconcrete.boundary_conditions.boundary import line_at, plane_at - - -def test_cube() -> None: - n = 4 - domain = dolfinx.mesh.create_unit_cube(MPI.COMM_WORLD, n, n, n, dolfinx.mesh.CellType.hexahedron) - V = dolfinx.fem.FunctionSpace(domain, ("Lagrange", 1)) - - x_axis = line_at([0, 0], ["z", "y"]) - y_axis = line_at([0, 0], ["z", "x"]) - z_axis = line_at([0, 0], ["x", "y"]) - - axis_list = [x_axis, y_axis, z_axis] - - for axis in axis_list: - dofs = dolfinx.fem.locate_dofs_geometrical(V, x_axis) - nodal_value = 42 - bc = dolfinx.fem.dirichletbc(ScalarType(nodal_value), dofs, V) - ndofs = 
bc.dof_indices()[1] - assert ndofs == (n + 1) - assert bc.g.value == nodal_value diff --git a/tests/boundary_conditions/test_boundary_plane_at.py b/tests/boundary_conditions/test_boundary_plane_at.py deleted file mode 100644 index 9214d28..0000000 --- a/tests/boundary_conditions/test_boundary_plane_at.py +++ /dev/null @@ -1,72 +0,0 @@ -"""Based on Philipp Diercks implementation for multi""" - -import dolfinx -import numpy as np -from mpi4py import MPI -from petsc4py.PETSc import ScalarType - -from fenicsxconcrete.boundary_conditions.boundary import plane_at - - -def test_square() -> None: - domain = dolfinx.mesh.create_unit_square(MPI.COMM_WORLD, 8, 8, dolfinx.mesh.CellType.quadrilateral) - V = dolfinx.fem.FunctionSpace(domain, ("Lagrange", 1)) - - bottom = plane_at(0.0, "y") - top = plane_at(1.0, "y") - left = plane_at(0.0, "x") - right = plane_at(1.0, "x") - - dofs = dolfinx.fem.locate_dofs_geometrical(V, bottom) - bc = dolfinx.fem.dirichletbc(ScalarType(42), dofs, V) - ndofs = bc.dof_indices()[1] - assert ndofs == 9 - assert bc.g.value == 42 - - def on_boundary(x): - return np.logical_or(top(x), bottom(x)) - - dofs = dolfinx.fem.locate_dofs_geometrical(V, on_boundary) - bc = dolfinx.fem.dirichletbc(ScalarType(17), dofs, V) - ndofs = bc.dof_indices()[1] - assert ndofs == 9 * 2 - assert bc.g.value == 17 - - def origin(x): - return np.logical_and(left(x), bottom(x)) - - dofs = dolfinx.fem.locate_dofs_geometrical(V, origin) - bc = dolfinx.fem.dirichletbc(ScalarType(21), dofs, V) - ndofs = bc.dof_indices()[1] - assert ndofs == 1 - assert bc.g.value == 21 - - def l_shaped_boundary(x): - return np.logical_or(top(x), right(x)) - - facet_dim = 1 - boundary_facets = dolfinx.mesh.locate_entities_boundary(domain, facet_dim, l_shaped_boundary) - boundary_dofs = dolfinx.fem.locate_dofs_topological(V, facet_dim, boundary_facets) - bc = dolfinx.fem.dirichletbc(ScalarType(666), boundary_dofs, V) - ndofs = bc.dof_indices()[1] - assert ndofs == 17 - assert bc.g.value == 666 - - -def test_cube() -> None: - nx, ny, nz = 4, 4, 4 - domain = dolfinx.mesh.create_unit_cube(MPI.COMM_WORLD, nx, ny, nz, dolfinx.mesh.CellType.hexahedron) - V = dolfinx.fem.FunctionSpace(domain, ("Lagrange", 1)) - - xy_plane = plane_at(0.0, "z") - dofs = dolfinx.fem.locate_dofs_geometrical(V, xy_plane) - nodal_value = 42 - bc = dolfinx.fem.dirichletbc(ScalarType(nodal_value), dofs, V) - ndofs = bc.dof_indices()[1] - assert ndofs == (nx + 1) * (ny + 1) - assert bc.g.value == nodal_value - - -if __name__ == "__main__": - test_square() - test_cube() diff --git a/tests/boundary_conditions/test_boundary_point_at.py b/tests/boundary_conditions/test_boundary_point_at.py deleted file mode 100644 index 9ed2561..0000000 --- a/tests/boundary_conditions/test_boundary_point_at.py +++ /dev/null @@ -1,59 +0,0 @@ -"""Based on Philipp Diercks implementation for multi""" - -import dolfinx -import numpy as np -from mpi4py import MPI -from petsc4py.PETSc import ScalarType - -from fenicsxconcrete.boundary_conditions.boundary import point_at - - -def test_type_error() -> None: - """test TypeError in conversion to float""" - n = 10 - domain = dolfinx.mesh.create_interval(MPI.COMM_WORLD, n, [0.0, 10.0]) - V = dolfinx.fem.FunctionSpace(domain, ("Lagrange", 1)) - x = point_at(5) - dofs = dolfinx.fem.locate_dofs_geometrical(V, x) - nodal_value = 42 - bc = dolfinx.fem.dirichletbc(ScalarType(nodal_value), dofs, V) - ndofs = bc.dof_indices()[1] - assert ndofs == 1 - assert bc.g.value == nodal_value - - -def test_function_space() -> None: - n = 101 - domain = 
dolfinx.mesh.create_unit_square(MPI.COMM_WORLD, n, n, dolfinx.mesh.CellType.quadrilateral) - V = dolfinx.fem.FunctionSpace(domain, ("Lagrange", 2)) - - h = 1.0 / n - my_point = point_at(np.array([h * 2, h * 5])) - - dofs = dolfinx.fem.locate_dofs_geometrical(V, my_point) - bc = dolfinx.fem.dirichletbc(ScalarType(42), dofs, V) - ndofs = bc.dof_indices()[1] - assert ndofs == 1 - assert bc.g.value == 42 - - -def test_vector_function_space() -> None: - n = 101 - domain = dolfinx.mesh.create_unit_square(MPI.COMM_WORLD, n, n, dolfinx.mesh.CellType.quadrilateral) - V = dolfinx.fem.VectorFunctionSpace(domain, ("Lagrange", 2)) - - # note the inconsistency in specifying the coordinates - # this is handled by `to_floats` - points = [0, [1.0], [0.0, 1.0], [1.0, 1.0, 0.0]] - nodal_dofs = np.array([], dtype=np.int32) - for x in points: - dofs = dolfinx.fem.locate_dofs_geometrical(V, point_at(x)) - bc = dolfinx.fem.dirichletbc(np.array([0, 0], dtype=ScalarType), dofs, V) - nodal_dofs = np.append(nodal_dofs, bc.dof_indices()[0]) - assert nodal_dofs.size == 8 - - -if __name__ == "__main__": - test_function_space() - test_vector_function_space() - test_type_error() diff --git a/tests/boundary_conditions/test_boundary_within_range.py b/tests/boundary_conditions/test_boundary_within_range.py deleted file mode 100644 index 176bceb..0000000 --- a/tests/boundary_conditions/test_boundary_within_range.py +++ /dev/null @@ -1,42 +0,0 @@ -"""Based on Philipp Diercks implementation for multi""" - -import dolfinx -from mpi4py import MPI -from petsc4py.PETSc import ScalarType - -from fenicsxconcrete.boundary_conditions.boundary import within_range - - -def test_1d() -> None: - n = 10 - domain = dolfinx.mesh.create_unit_interval(MPI.COMM_WORLD, n) - V = dolfinx.fem.FunctionSpace(domain, ("Lagrange", 1)) - # test reordering of start and end - subdomain = within_range([0.75, 0.0, 0.0], [0.35, 0.0, 0.0]) - dofs = dolfinx.fem.locate_dofs_geometrical(V, subdomain) - bc = dolfinx.fem.dirichletbc(ScalarType(12), dofs, V) - num_dofs = bc.dof_indices()[1] - assert num_dofs == 4 - - -def test_2d() -> None: - n = 200 - domain = dolfinx.mesh.create_unit_square(MPI.COMM_WORLD, n, n, dolfinx.mesh.CellType.quadrilateral) - V = dolfinx.fem.FunctionSpace(domain, ("Lagrange", 1)) - - Δx = Δy = 1 / (n + 1) # exclude the right and top boundary, Δ must be smaller than cell size - boundary = within_range([0.0, 0.0, 0.0], [1.0 - Δx, 1.0 - Δy, 0.0]) - - facet_dim = 1 - boundary_facets = dolfinx.mesh.locate_entities_boundary(domain, facet_dim, boundary) - boundary_dofs = dolfinx.fem.locate_dofs_topological(V, facet_dim, boundary_facets) - bc = dolfinx.fem.dirichletbc(ScalarType(666), boundary_dofs, V) - ndofs = bc.dof_indices()[1] - # n_boundary_dofs = (n-1) * 4 + 4 - expected = (n - 1) * 2 + 1 - assert ndofs == expected - - -if __name__ == "__main__": - test_1d() - test_2d() diff --git a/tests/boundary_conditions/test_dirichlet_bcs.py b/tests/boundary_conditions/test_dirichlet_bcs.py deleted file mode 100644 index e201c71..0000000 --- a/tests/boundary_conditions/test_dirichlet_bcs.py +++ /dev/null @@ -1,238 +0,0 @@ -"""Based on Philipp Diercks implementation for multi""" - -import dolfinx -import numpy as np -import pytest -from mpi4py import MPI -from petsc4py.PETSc import ScalarType - -from fenicsxconcrete.boundary_conditions.bcs import BoundaryConditions -from fenicsxconcrete.boundary_conditions.boundary import create_facet_tags, plane_at - -"""Note: topological vs. 
geometrical - -It seems that `locate_dofs_geometrical` does not work with V.sub -since at some point the dof coordinates need to be tabulated -which is not possible for a subspace. -However, one could always first locate the entities geometrically -if this is more convenient. - -```python -from dolfinx.fem import dirichletbc -from dolfinx.mesh import locate_entities_boundary, locate_dofs_topological - -def plane_at(coordinate, dim): - - def boundary(x): - return np.isclose(x[dim], coordinate) - - return boundary - -bottom = plane_at(0., 1) - -bottom_boundary_facets = locate_entities_boundary( - domain, domain.topology.dim - 1, bottom -) -bottom_boundary_dofs_y = locate_dofs_topological( - V.sub(1), domain.topology.dim - 1, bottom_boundary_facets -) -fix_uy = dirichletbc(ScalarType(0), bottom_boundary_dofs_y, V.sub(1)) -``` - -""" - - -def test_vector_geom() -> None: - domain = dolfinx.mesh.create_unit_square(MPI.COMM_WORLD, 8, 8, dolfinx.mesh.CellType.quadrilateral) - V = dolfinx.fem.VectorFunctionSpace(domain, ("Lagrange", 2)) - - bc_handler = BoundaryConditions(domain, V) - - def left(x): - return np.isclose(x[0], 0.0) - - tdim = domain.topology.dim - fdim = tdim - 1 - domain.topology.create_connectivity(fdim, tdim) - - # entire boundary; should have 64 * 2 dofs - # constrain entire boundary only for the x-component - boundary_facets = dolfinx.mesh.exterior_facet_indices(domain.topology) - bc_handler.add_dirichlet_bc(ScalarType(0), boundary_facets, sub=0, method="topological", entity_dim=fdim) - # constrain left boundary as well - zero = np.array([0.0, 0.0], dtype=ScalarType) - bc_handler.add_dirichlet_bc(zero, left, method="geometrical") - - # use a Constant and constrain same dofs again for fun - bc_handler.add_dirichlet_bc( - dolfinx.fem.Constant(domain, ScalarType(0.0)), - boundary_facets, - sub=0, - entity_dim=fdim, - ) - - bcs = bc_handler.bcs - ndofs = 0 - for bc in bcs: - ndofs += bc.dof_indices()[1] - - assert ndofs == 64 + 34 + 64 - - -def test_vector_geom_component_wise() -> None: - domain = dolfinx.mesh.create_unit_square(MPI.COMM_WORLD, 8, 8, dolfinx.mesh.CellType.quadrilateral) - V = dolfinx.fem.VectorFunctionSpace(domain, ("Lagrange", 2)) - - bc_handler = BoundaryConditions(domain, V) - - def left(x): - return np.isclose(x[0], 0.0) - - tdim = domain.topology.dim - fdim = tdim - 1 - domain.topology.create_connectivity(fdim, tdim) - - zero = ScalarType(0.0) - bc_handler.add_dirichlet_bc(zero, left, method="geometrical", sub=0, entity_dim=fdim) - - bcs = bc_handler.bcs - ndofs = 0 - for bc in bcs: - ndofs += bc.dof_indices()[1] - - assert ndofs == 17 - - -def test_scalar_geom() -> None: - domain = dolfinx.mesh.create_unit_square(MPI.COMM_WORLD, 8, 8) - V = dolfinx.fem.FunctionSpace(domain, ("Lagrange", 2)) - - bc_handler = BoundaryConditions(domain, V) - - def left(x): - return np.isclose(x[0], 0.0) - - bc_handler.add_dirichlet_bc(ScalarType(0), left, method="geometrical") - - bcs = bc_handler.bcs - my_bc = bcs[0] - - ndofs = my_bc.dof_indices()[1] - all_ndofs = domain.comm.allreduce(ndofs, op=MPI.SUM) - assert all_ndofs == 17 - assert my_bc.g.value == 0.0 - - -def test_scalar_topo() -> None: - n = 20 - domain = dolfinx.mesh.create_unit_square(MPI.COMM_WORLD, n, n) - V = dolfinx.fem.FunctionSpace(domain, ("Lagrange", 2)) - - bc_handler = BoundaryConditions(domain, V) - - tdim = domain.topology.dim - fdim = tdim - 1 - domain.topology.create_connectivity(fdim, tdim) - - # entire boundary; should have (n+1+n)*4 - 4 = 8n dofs - boundary_facets = 
dolfinx.mesh.exterior_facet_indices(domain.topology) - bc_handler.add_dirichlet_bc(ScalarType(0), boundary_facets, entity_dim=fdim) - - bcs = bc_handler.bcs - my_bc = bcs[0] - - ndofs = my_bc.dof_indices()[1] - all_ndofs = domain.comm.allreduce(ndofs, op=MPI.SUM) - assert all_ndofs == 8 * n - assert my_bc.g.value == 0.0 - - -def test_dirichletbc() -> None: - """add instance of dolfinx.fem.dirichletbc""" - n = 20 - domain = dolfinx.mesh.create_unit_square(MPI.COMM_WORLD, n, n) - V = dolfinx.fem.FunctionSpace(domain, ("Lagrange", 2)) - bc_handler = BoundaryConditions(domain, V) - dofs = dolfinx.fem.locate_dofs_geometrical(V, plane_at(0.0, "x")) - bc = dolfinx.fem.dirichletbc(ScalarType(0), dofs, V) - assert not bc_handler.has_dirichlet - bc_handler.add_dirichlet_bc(bc) - assert bc_handler.has_dirichlet - - -def test_runtimeerror_geometrical() -> None: - """test method geometrical raises RuntimeError if sub - is not None""" - n = 20 - domain = dolfinx.mesh.create_unit_square(MPI.COMM_WORLD, n, n) - V = dolfinx.fem.VectorFunctionSpace(domain, ("Lagrange", 2)) - Vsub = V.sub(0) - bottom = plane_at(0.0, "y") - with pytest.raises(RuntimeError): - dolfinx.fem.locate_dofs_geometrical(Vsub, bottom) - - -def test_boundary_as_int() -> None: - n = 5 - domain = dolfinx.mesh.create_unit_square(MPI.COMM_WORLD, n, n) - V = dolfinx.fem.VectorFunctionSpace(domain, ("Lagrange", 2)) - marker = 1011 - bottom = {"bottom": (marker, plane_at(0.0, "y"))} - ft, marked = create_facet_tags(domain, bottom) - - bch_wo_ft = BoundaryConditions(domain, V) - bc_handler = BoundaryConditions(domain, V, facet_tags=ft) - - zero = ScalarType(0.0) - with pytest.raises(AttributeError): - bch_wo_ft.add_dirichlet_bc(zero, boundary=0, sub=0, entity_dim=1) - with pytest.raises(ValueError): - bc_handler.add_dirichlet_bc(zero, boundary=0, sub=0, entity_dim=1) - assert not bc_handler.has_dirichlet - bc_handler.add_dirichlet_bc(zero, boundary=marker, sub=0, entity_dim=1) - assert bc_handler.has_dirichlet - - -def test_value_interpolation() -> None: - n = 50 - domain = dolfinx.mesh.create_unit_interval(MPI.COMM_WORLD, n) - V = dolfinx.fem.FunctionSpace(domain, ("Lagrange", 2)) - my_value = 17.2 - - def expression(x): - return np.ones_like(x[0]) * my_value - - def everywhere(x): - return np.full(x[0].shape, True, dtype=bool) - - bc_handler = BoundaryConditions(domain, V) - bc_handler.add_dirichlet_bc(expression, everywhere, entity_dim=0) - bc = bc_handler.bcs[0] - dofs = bc.dof_indices()[0] - assert np.allclose(np.ones_like(dofs) * my_value, bc.g.x.array[dofs]) - - -def test_clear() -> None: - n = 2 - domain = dolfinx.mesh.create_unit_interval(MPI.COMM_WORLD, n) - V = dolfinx.fem.FunctionSpace(domain, ("Lagrange", 2)) - bc_handler = BoundaryConditions(domain, V) - dofs = dolfinx.fem.locate_dofs_geometrical(V, plane_at(0.0, "x")) - bc = dolfinx.fem.dirichletbc(ScalarType(0), dofs, V) - assert not bc_handler.has_dirichlet - bc_handler.add_dirichlet_bc(bc) - assert bc_handler.has_dirichlet - bc_handler.clear(neumann=False) - assert not bc_handler.has_dirichlet - - -if __name__ == "__main__": - test_scalar_geom() - test_scalar_topo() - test_vector_geom() - test_vector_geom_component_wise() - test_dirichletbc() - test_runtimeerror_geometrical() - test_boundary_as_int() - test_value_interpolation() - test_clear() diff --git a/tests/boundary_conditions/test_neumann_bcs.py b/tests/boundary_conditions/test_neumann_bcs.py deleted file mode 100644 index 1ba16f1..0000000 --- a/tests/boundary_conditions/test_neumann_bcs.py +++ /dev/null @@ -1,46 +0,0 @@ 
-import dolfinx -import numpy as np -import pytest -from mpi4py import MPI -from petsc4py import PETSc - -from fenicsxconcrete.boundary_conditions.bcs import BoundaryConditions -from fenicsxconcrete.boundary_conditions.boundary import create_facet_tags, plane_at - - -def test_constant_traction() -> None: - n = 10 - domain = dolfinx.mesh.create_unit_square(MPI.COMM_WORLD, n, n, dolfinx.mesh.CellType.quadrilateral) - V = dolfinx.fem.VectorFunctionSpace(domain, ("Lagrange", 1)) - rmarker = 12 - my_boundaries = {"right": (rmarker, plane_at(0.0, "x"))} - ft, mb = create_facet_tags(domain, my_boundaries) - bch = BoundaryConditions(domain, V, facet_tags=ft) - - tmax = 234.0 - traction = dolfinx.fem.Constant(domain, PETSc.ScalarType((tmax, 0.0))) - assert not bch.has_neumann - bch.add_neumann_bc(rmarker, traction) - assert bch.has_neumann - - rhs = bch.neumann_bcs - form = dolfinx.fem.form(rhs) - vector = dolfinx.fem.petsc.create_vector(form) - - with vector.localForm() as v_loc: - v_loc.set(0) - dolfinx.fem.petsc.assemble_vector(vector, form) - vector.ghostUpdate(addv=PETSc.InsertMode.ADD, mode=PETSc.ScatterMode.REVERSE) - f_ext = np.sum(vector[:]) - assert np.isclose(tmax, f_ext) - - bch.clear(dirichlet=False) - assert not bch.has_neumann - - # try to add non-existent marker - with pytest.raises(ValueError): - bch.add_neumann_bc(666, traction) - - -if __name__ == "__main__": - test_constant_traction() diff --git a/tests/experimental_setup/test_experimental_setups.py b/tests/experimental_setup/test_experimental_setups.py deleted file mode 100644 index 82e5981..0000000 --- a/tests/experimental_setup/test_experimental_setups.py +++ /dev/null @@ -1,110 +0,0 @@ -import copy - -import pint -import pytest - -from fenicsxconcrete.experimental_setup.am_multiple_layers import AmMultipleLayers -from fenicsxconcrete.experimental_setup.base_experiment import Experiment -from fenicsxconcrete.experimental_setup.cantilever_beam import CantileverBeam -from fenicsxconcrete.experimental_setup.compression_cylinder import CompressionCylinder -from fenicsxconcrete.experimental_setup.simple_beam import SimpleBeam -from fenicsxconcrete.experimental_setup.simple_cube import SimpleCube -from fenicsxconcrete.experimental_setup.tensile_beam import TensileBeam -from fenicsxconcrete.finite_element_problem.linear_elasticity import LinearElasticity -from fenicsxconcrete.util import ureg - -# # makes no sense anymore since the default parameter are used in each experiment or material problem -# # plus was not tested since twice the same names!! 
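The remark above about "twice the same names" refers to Python name shadowing: a second `def test_default_parameters` rebinds the module attribute, so pytest collects and runs only the last definition. A minimal, self-contained sketch of that pitfall (illustrative only, not part of the test suite):

```python
# pitfall sketch: two test functions sharing a name in one module --
# the later definition rebinds the name, so pytest collects only this last
# one and the first version is silently never executed.


def test_default_parameters():
    assert True  # shadowed by the redefinition below, never runs


def test_default_parameters():  # noqa: F811 - intentional redefinition for illustration
    assert 1 + 1 == 2
```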
-# @pytest.mark.parametrize( -# "setup", -# [ -# CantileverBeam, -# TensileBeam, -# SimpleBeam, -# CompressionCylinder, -# AmMultipleLayers, -# SimpleCube, -# ], -# ) -# def test_default_parameters(setup: Experiment) -> None: -# """This function creates experimental setups with the respective default dictionaries -# -# This makes sure all relevant values are included""" -# default_material = LinearElasticity -# -# setup_parameters = setup.default_parameters() -# -# # initialize with default parameters -# experiment = setup(setup_parameters) -# -# # test that each parameter is truly required -# for key in setup_parameters: -# with pytest.raises(KeyError): -# less_parameters = copy.deepcopy(setup_parameters) -# less_parameters.pop(key) -# experiment = setup(less_parameters) -# fem_problem = default_material(experiment, default_material.default_parameters()[1]) -# fem_problem.solve() - - -@pytest.mark.parametrize( - "setup", - [ - CantileverBeam, - TensileBeam, - SimpleBeam, - CompressionCylinder, - AmMultipleLayers, - SimpleCube, - ], -) -def test_default_parameters(setup: Experiment) -> None: - """This function tests if the default_parameters are complete""" - # default_material = LinearElasticity - - setup_parameters = setup.default_parameters() - - try: - experiment = setup(setup_parameters) - except KeyError: - print("default parameter dictionary is wrong") - raise ValueError - - -# to imporve coverage, I want to test the error messages -@pytest.mark.parametrize( - "setup", - [ - CantileverBeam, - TensileBeam, - SimpleBeam, - CompressionCylinder, - AmMultipleLayers, - SimpleCube, - ], -) -def test_default_parameters(setup: Experiment) -> None: - setup_parameters = setup.default_parameters() - - with pytest.raises(ValueError): - setup_parameters["dim"] = 4 * ureg("") # there is no 4D setup - test_setup = setup(setup_parameters) - - -@pytest.mark.parametrize( - "setup", - [ - CantileverBeam, - TensileBeam, - SimpleBeam, - CompressionCylinder, - AmMultipleLayers, - SimpleCube, - ], -) -def test_dimensionality_check(setup: Experiment) -> None: - setup_parameters = setup.default_parameters() - - with pytest.raises(ValueError): - setup_parameters["dim"] = 3 * ureg("s") # dimension should be dimensionless - test_setup = setup(setup_parameters) diff --git a/tests/finite_element_problem/_generate_thermo_mechanical_data_fenics_concrete.py b/tests/finite_element_problem/_generate_thermo_mechanical_data_fenics_concrete.py deleted file mode 100644 index 70c56c9..0000000 --- a/tests/finite_element_problem/_generate_thermo_mechanical_data_fenics_concrete.py +++ /dev/null @@ -1,127 +0,0 @@ -""" -This file needs to run in an environment with version -(commit 13486645b01665b4da248edd268b1904b0b5b745 (HEAD -> master, tag: v0.9.0, origin/master, origin/HEAD)) -of FenicsConcrete https://github.com/BAMresearch/FenicsConcrete - -This file should not run during tests. It is only in this directory in order to generate data that will be compared -to the new implementation of the thermo mechanical model. 
-""" - -try: - import fenics_concrete -except ModuleNotFoundError: - raise ModuleNotFoundError( - """This file needs to run in an environment with version - (commit 13486645b01665b4da248edd268b1904b0b5b745 (HEAD -> master, tag: v0.9.0, origin/master, origin/HEAD)) - of FenicsConcrete""" - ) -from pathlib import Path - -import numpy as np - -parameters = fenics_concrete.Parameters() # using the current default values -# general -parameters["log_level"] = "WARNING" -# mesh -parameters["mesh_setting"] = "left/right" # default boundary setting -parameters["dim"] = 3 -parameters["mesh_density"] = 2 -# temperature boundary -parameters["bc_setting"] = "full" # default boundary setting -parameters["T_0"] = 20 # inital concrete temperature -parameters["T_bc1"] = 20 # temperature boundary value 1 - -parameters["density"] = 2350 # in kg/m^3 density of concrete -parameters["density_binder"] = 1440 # in kg/m^3 density of the binder -parameters["themal_cond"] = 2.0 # effective thermal conductivity, approx in Wm^-3K^-1, concrete! -# self.specific_heat_capacity = 9000 # effective specific heat capacity in J kg⁻1 K⁻1 -parameters["vol_heat_cap"] = 2.4e6 # volumetric heat cap J/(m3 K) -parameters["b_ratio"] = 0.2 # volume percentage of binder -parameters["Q_pot"] = 500e3 # potential heat per weight of binder in J/kg -# p['Q_inf'] = self.Q_pot * self.density_binder * self.b_ratio # potential heat per concrete volume in J/m3 -parameters["B1"] = 2.916e-4 # in 1/s -parameters["B2"] = 0.0024229 # - -parameters["eta"] = 5.554 # something about diffusion -parameters["alpha_max"] = 0.87 # also possible to approximate based on equation with w/c -parameters["alpha_tx"] = 0.68 # also possible to approximate based on equation with w/c -parameters["E_act"] = 5653 * 8.3145 # activation energy in Jmol^-1 -parameters["T_ref"] = 25 # reference temperature in degree celsius -# setting for temperature adjustment -parameters["temp_adjust_law"] = "exponential" -# polinomial degree -parameters["degree"] = 2 # default boundary setting -### paramters for mechanics problem -parameters["E"] = 42000000 # Youngs Modulus N/m2 or something... 
-parameters["nu"] = 0.2 # Poissons Ratio -# required paramters for alpha to E mapping -parameters["alpha_t"] = 0.2 -parameters["alpha_0"] = 0.05 -parameters["a_E"] = 0.6 -# required paramters for alpha to tensile and compressive stiffness mapping -parameters["fc"] = 6210000 -parameters["a_fc"] = 1.2 -parameters["ft"] = 467000 -parameters["a_ft"] = 1.0 - -experiment = fenics_concrete.ConcreteCubeExperiment(parameters) -problem = fenics_concrete.ConcreteThermoMechanical(experiment=experiment, parameters=parameters, vmapoutput=False) - - -E_sensor = fenics_concrete.sensors.YoungsModulusSensor((0.25, 0.25, 0.25)) -fc_sensor = fenics_concrete.sensors.CompressiveStrengthSensor((0.25, 0.25, 0.25)) -doh_sensor = fenics_concrete.sensors.DOHSensor((0.25, 0.25, 0.25)) -# t_sensor = fenics_concrete.sensors.TemperatureSensor((0.25, 0.25)) - - -problem.add_sensor(E_sensor) -problem.add_sensor(fc_sensor) -problem.add_sensor(doh_sensor) -# problem.add_sensor(t_sensor) - -# data for time stepping -dt = 3600 # 60 min step - -# set time step -problem.set_timestep(dt) # for time integration scheme - -# print(problem.p) -# initialize time -t = dt # first time step time -t_list = [] -u_list = [] -temperature_list = [] -doh = 0 - -# import matplotlib.pyplot as plt -# delta_alpha = np.linspace(0,0.006, 1000) - -# plt.plot(delta_alpha,[problem.temperature_problem.delta_alpha_fkt(delta_alpha_i, 0., 293.15) for delta_alpha_i in delta_alpha]) -# plt.show() - -while doh < parameters["alpha_tx"]: # time - # solve temp-hydration-mechanics - print("solving at t=", t) - problem.solve(t=t) # solving this - t_list.append(t) - u_list.append(problem.displacement.vector().get_local()) - temperature_list.append(problem.temperature.vector().get_local()) - # prepare next timestep - t += dt - # import sys - # sys.exit() - - # get last measure value - doh = problem.sensors[doh_sensor.name].data[-1] -dof_map_u = problem.displacement.function_space().tabulate_dof_coordinates() -dof_map_t = problem.temperature.function_space().tabulate_dof_coordinates() -np.savez( - Path(__file__).parent / "fenics_concrete_thermo_mechanical.npz", - t=np.array(t_list), - u=np.array(u_list), - T=np.array(temperature_list), - dof_map_u=dof_map_u, - dof_map_t=dof_map_t, - E=E_sensor.data, - fc=fc_sensor.data, - doh=doh_sensor.data, -) diff --git a/tests/finite_element_problem/fenics_concrete_thermo_mechanical.npz b/tests/finite_element_problem/fenics_concrete_thermo_mechanical.npz deleted file mode 100644 index 2957b06..0000000 Binary files a/tests/finite_element_problem/fenics_concrete_thermo_mechanical.npz and /dev/null differ diff --git a/tests/finite_element_problem/test_am_layers.py b/tests/finite_element_problem/test_am_layers.py deleted file mode 100644 index 74dddde..0000000 --- a/tests/finite_element_problem/test_am_layers.py +++ /dev/null @@ -1,315 +0,0 @@ -import os -from pathlib import Path - -import dolfinx as df -import numpy as np -import pytest - -from fenicsxconcrete.experimental_setup import AmMultipleLayers -from fenicsxconcrete.finite_element_problem import ConcreteAM, ConcreteThixElasticModel -from fenicsxconcrete.sensor_definition.reaction_force_sensor import ReactionForceSensor -from fenicsxconcrete.sensor_definition.strain_sensor import StrainSensor -from fenicsxconcrete.sensor_definition.stress_sensor import StressSensor -from fenicsxconcrete.util import Parameters, QuadratureEvaluator, ureg - - -def set_test_parameters(dim: int, mat_type: str = "thix") -> Parameters: - """set up a test parameter set - - Args: - dim: dimension of 
problem - - Returns: filled instance of Parameters - - """ - setup_parameters = {} - - setup_parameters["dim"] = dim * ureg("") - # setup_parameters["stress_state"] = "plane_strain" - setup_parameters["num_layers"] = 5 * ureg("") # changed in single layer test!! - setup_parameters["layer_height"] = 1 / 100 * ureg("m") # y (2D), z (3D) - setup_parameters["layer_length"] = 50 / 100 * ureg("m") # x - setup_parameters["layer_width"] = 5 / 100 * ureg("m") # y (3D) - - setup_parameters["num_elements_layer_length"] = 10 * ureg("") - setup_parameters["num_elements_layer_height"] = 1 * ureg("") - setup_parameters["num_elements_layer_width"] = 2 * ureg("") - - if dim == 2: - setup_parameters["stress_state"] = "plane_stress" * ureg("") - - # default material parameters from material problem - - if dim == 3: - setup_parameters["q_degree"] = 4 * ureg("") - - return setup_parameters - - -@pytest.mark.parametrize("dimension", [2, 3]) -@pytest.mark.parametrize("factor", [1, 2]) -def test_am_single_layer(dimension: int, factor: int) -> None: - """single layer test - - one layer build immediately and lying for a given time - - Args: - dimension: dimension - factor: length of load_time = factor * dt - """ - - # setup paths and directories - data_dir = "data_files" - data_path = Path(__file__).parent / data_dir - - # define file name and path for paraview output - file_name = f"test_am_single_layer_{dimension}d" - files = [data_path / (file_name + ".xdmf"), data_path / (file_name + ".h5")] - # delete file if it exisits (only relevant for local tests) - for file in files: - if file.is_file(): - os.remove(file) - - # defining parameters - setup_parameters = set_test_parameters(dimension) - setup_parameters["num_layers"] = 1 * ureg("") - - # solving parameters - solve_parameters = {} - solve_parameters["time"] = 6 * 60 * ureg("s") - - # defining different loading - setup_parameters["dt"] = 60 * ureg("s") - setup_parameters["load_time"] = factor * setup_parameters["dt"] # interval where load is applied linear over time - - # setting up the problem - experiment = AmMultipleLayers(setup_parameters) - - # problem = LinearElasticity(experiment, setup_parameters) - problem = ConcreteAM( - experiment, setup_parameters, nonlinear_problem=ConcreteThixElasticModel, pv_name=file_name, pv_path=data_path - ) - problem.add_sensor(ReactionForceSensor()) - problem.add_sensor(StressSensor([problem.p["layer_length"] / 2, 0, 0])) - problem.add_sensor(StrainSensor([problem.p["layer_length"] / 2, 0, 0])) - - E_o_time = [] - total_time = 6 * 60 * ureg("s") - while problem.time <= total_time.to_base_units().magnitude: - problem.solve() - problem.pv_plot() - print("computed disp", problem.time, problem.fields.displacement.x.array[:].max()) - # # store Young's modulus over time - E_o_time.append(problem.youngsmodulus.vector.array[:].max()) - - # check reaction force - force_bottom_y = np.array(problem.sensors["ReactionForceSensor"].data)[:, -1] - dead_load = ( - problem.p["g"] - * problem.p["rho"] - * problem.p["layer_length"] - * problem.p["num_layers"] - * problem.p["layer_height"] - ) - if dimension == 2: - dead_load *= 1 # m - elif dimension == 3: - dead_load *= problem.p["layer_width"] - - # dead load of full structure - print("Check", force_bottom_y, dead_load) - assert sum(force_bottom_y) == pytest.approx(-dead_load) - - # check stresses change according to Emodul change - sig_o_time = np.array(problem.sensors["StressSensor"].data)[:, -1] - eps_o_time = np.array(problem.sensors["StrainSensor"].data)[:, -1] - - if factor == 1: - # 
instance loading -> no changes - assert sum(np.diff(sig_o_time)) == pytest.approx(0, abs=1e-8) - assert sum(np.diff(eps_o_time)) == pytest.approx(0, abs=1e-8) - elif factor == 2: - # ratio sig/eps t=0 to sig/eps t=0+dt - E_ratio_computed = (sig_o_time[0] / eps_o_time[0]) / (np.diff(sig_o_time)[0] / np.diff(eps_o_time)[0]) - assert E_ratio_computed == pytest.approx(E_o_time[0] / E_o_time[1]) - # after second time step nothing should change anymore - assert sum(np.diff(sig_o_time)[factor - 1 : :]) == pytest.approx(0, abs=1e-8) - assert sum(np.diff(eps_o_time)[factor - 1 : :]) == pytest.approx(0, abs=1e-8) - - -@pytest.mark.parametrize("dimension", [2]) -@pytest.mark.parametrize("mat", ["thix"]) # visco will be added next time -def test_am_multiple_layer(dimension: int, mat: str, plot: bool = False) -> None: - """multiple layer test - - several layers building over time one layer at once - - Args: - dimension: dimension - - """ - - # setup paths and directories - data_dir = "data_files" - data_path = Path(__file__).parent / data_dir - - # define file name and path for paraview output - file_name = f"test_am_multiple_layer_{dimension}d" - files = [data_path / (file_name + ".xdmf"), data_path / (file_name + ".h5")] - # delete file if it exists (only relevant for local tests) - for file in files: - if file.is_file(): - os.remove(file) - - # defining parameters - setup_parameters = set_test_parameters(dimension, mat_type=mat) - - # defining different loading - time_layer = 20 * ureg("s") # time to build one layer - setup_parameters["dt"] = time_layer / 4 - setup_parameters["load_time"] = 2 * setup_parameters["dt"] # interval where load is applied linear over time - - # setting up the problem - experiment = AmMultipleLayers(setup_parameters) - if mat.lower() == "thix": - problem = ConcreteAM( - experiment, - setup_parameters, - nonlinear_problem=ConcreteThixElasticModel, - pv_name=file_name, - pv_path=data_path, - ) - else: - print(f"nonlinear problem {mat} not yet implemented") - - # initial path function describing layer activation - path_activation = define_path( - problem, time_layer.magnitude, t_0=-(setup_parameters["num_layers"].magnitude - 1) * time_layer.magnitude - ) - problem.set_initial_path(path_activation) - - problem.add_sensor(ReactionForceSensor()) - problem.add_sensor(StressSensor([problem.p["layer_length"] / 2, 0, 0])) - problem.add_sensor(StrainSensor([problem.p["layer_length"] / 2, 0, 0])) - - total_time = setup_parameters["num_layers"] * time_layer - while problem.time <= total_time.to_base_units().magnitude: - problem.solve() - problem.pv_plot() - print("computed disp", problem.time, problem.fields.displacement.x.array[:].max()) - - # check residual force bottom - force_bottom_y = np.array(problem.sensors["ReactionForceSensor"].data)[:, -1] - print("force", force_bottom_y) - dead_load = ( - problem.p["g"] - * problem.p["rho"] - * problem.p["layer_length"] - * problem.p["num_layers"] - * problem.p["layer_height"] - ) - if dimension == 2: - dead_load *= 1 # m - elif dimension == 3: - dead_load *= problem.p["layer_width"] - - print("check", sum(force_bottom_y), dead_load) - assert sum(force_bottom_y) == pytest.approx(-dead_load) - - # check E modulus evolution over structure (each layer different E) - if mat.lower() == "thix": - E_bottom_layer = ConcreteAM.E_fkt( - 1, - problem.time, - { - "P0": problem.p["E_0"], - "R_P": problem.p["R_E"], - "A_P": problem.p["A_E"], - "tf_P": problem.p["tf_E"], - "age_0": problem.p["age_0"], - }, - ) - time_upper = problem.time - 
(problem.p["num_layers"] - 1) * time_layer.magnitude - E_upper_layer = ConcreteAM.E_fkt( - 1, - time_upper, - { - "P0": problem.p["E_0"], - "R_P": problem.p["R_E"], - "A_P": problem.p["A_E"], - "tf_P": problem.p["tf_E"], - "age_0": problem.p["age_0"], - }, - ) - - print("E_bottom, E_upper", E_bottom_layer, E_upper_layer) - print(problem.youngsmodulus.vector.array[:].min(), problem.youngsmodulus.vector.array[:].max()) - assert problem.youngsmodulus.vector.array[:].min() == pytest.approx(E_upper_layer) - assert problem.youngsmodulus.vector.array[:].max() == pytest.approx(E_bottom_layer) - # - if plot: - # example plotting - strain_yy = np.array(problem.sensors["StrainSensor"].data)[:, -1] - time = [] - [time.append(ti) for ti in problem.sensors["StrainSensor"].time] - - import matplotlib.pylab as plt - - plt.figure(1) - plt.plot([0] + time, [0] + list(strain_yy), "*-r") - plt.xlabel("process time") - plt.ylabel("sensor bottom middle strain_yy") - plt.show() - - -def define_path(prob, t_diff, t_0=0): - """create path as layer wise at quadrature space - - one layer by time - - prob: problem - param: parameter dictionary - t_diff: time difference between each layer - t_0: start time for all (0 if static computation) - (-end_time last layer if dynamic computation) - """ - - # init path time array - q_path = prob.rule.create_quadrature_array(prob.mesh, shape=1) - - # get quadrature coordinates with work around since tabulate_dof_coordinates()[:] not possible for quadrature spaces! - V = df.fem.VectorFunctionSpace(prob.mesh, ("CG", prob.p["degree"])) - v_cg = df.fem.Function(V) - v_cg.interpolate(lambda x: (x[0], x[1])) - positions = QuadratureEvaluator(v_cg, prob.mesh, prob.rule) - x = positions.evaluate() - dof_map = np.reshape(x.flatten(), [len(q_path), 2]) - - # select layers only by layer height - y - y_CO = np.array(dof_map)[:, 1] - h_min = np.arange(0, prob.p["num_layers"] * prob.p["layer_height"], prob.p["layer_height"]) - h_max = np.arange( - prob.p["layer_height"], - (prob.p["num_layers"] + 1) * prob.p["layer_height"], - prob.p["layer_height"], - ) - # print("y_CO", y_CO) - # print("h_min", h_min) - # print("h_max", h_max) - new_path = np.zeros_like(q_path) - EPS = 1e-8 - for i in range(0, len(h_min)): - layer_index = np.where((y_CO > h_min[i] - EPS) & (y_CO <= h_max[i] + EPS)) - new_path[layer_index] = t_0 + (prob.p["num_layers"] - 1 - i) * t_diff - - q_path = new_path - - return q_path - - -# -# if __name__ == "__main__": -# -# # test_am_single_layer(2, 2) -# # -# test_am_multiple_layer(2, "thix", True) diff --git a/tests/finite_element_problem/test_base_material.py b/tests/finite_element_problem/test_base_material.py deleted file mode 100644 index 0083c34..0000000 --- a/tests/finite_element_problem/test_base_material.py +++ /dev/null @@ -1,109 +0,0 @@ -import json -import os -from copy import deepcopy -from pathlib import Path - -import pytest - -from fenicsxconcrete.experimental_setup.cantilever_beam import CantileverBeam -from fenicsxconcrete.finite_element_problem.linear_elasticity import LinearElasticity -from fenicsxconcrete.sensor_definition.displacement_sensor import DisplacementSensor - - -def test_sensor_dict() -> None: - """This function tests the sensor dict""" - - default_experiment, fem_parameters = LinearElasticity.default_parameters() - problem = LinearElasticity(default_experiment, fem_parameters) - - sensor_location = [0.0, 0.0, 0.0] - sensor = DisplacementSensor([sensor_location]) - - # testing the renaming of multiple sensors - problem.add_sensor(sensor) - 
problem.add_sensor(sensor) - assert "DisplacementSensor2" in problem.sensors.keys() - - # accessing as attribute - assert problem.sensors.DisplacementSensor2 - - -def test_sensor_error() -> None: - """This function tests the add sensor function""" - - default_experiment, fem_parameters = LinearElasticity.default_parameters() - problem = LinearElasticity(default_experiment, fem_parameters) - - with pytest.raises(ValueError): - problem.add_sensor("not a sensor") - - -def test_sensor_options() -> None: - """This function tests the function of creating and deleting sensors""" - - # setting up problem - setup_parameters = CantileverBeam.default_parameters() - default_setup, fem_parameters = LinearElasticity.default_parameters() - - sensor_location = [setup_parameters["length"].magnitude, 0.0, 0.0] - sensor = DisplacementSensor(sensor_location) - - # setting up the problem - experiment = CantileverBeam(setup_parameters) # Specifies the domain, discretises it and apply Dirichlet BCs - problem = LinearElasticity(experiment, fem_parameters) - - # check that no sensors yet exist - assert problem.sensors == {} - - # add sensor - problem.add_sensor(sensor) - - # check that sensor exists - assert problem.sensors != {} - - # check that no data is in sensor - assert problem.sensors[sensor.name].data == [] - - # solving and plotting - problem.solve() - - # check that some data is in sensor - measure = deepcopy(problem.sensors[sensor.name].data) - assert measure != [] - - # check export sensor data - problem.export_sensors_metadata(Path("sensors_metadata.json")) - expected_metadata = { - "sensors": [ - { - "id": "DisplacementSensor", - "type": "DisplacementSensor", - "sensor_file": "displacement_sensor", - "units": "meter", - "dimensionality": "[length]", - "where": [1, 0.0, 0.0], - } - ] - } - with open("sensors_metadata.json", "r") as f: - sensor_metadata = json.load(f) - assert sensor_metadata == expected_metadata - - # check cleaning of sensor data - problem.clean_sensor_data() - assert problem.sensors[sensor.name].data == [] - - # delete sensor - problem.delete_sensor() - assert problem.sensors == {} - - # check import sensor data - problem.import_sensors_from_metadata(Path("sensors_metadata.json")) - - os.remove("sensors_metadata.json") - - # repeat solving and plotting - problem.solve() - - # repeat check that some data is in imported sensor - assert problem.sensors[sensor.name].data[0] == pytest.approx(measure[0]) diff --git a/tests/finite_element_problem/test_default_dictionaries.py b/tests/finite_element_problem/test_default_dictionaries.py deleted file mode 100644 index 0e9bfa8..0000000 --- a/tests/finite_element_problem/test_default_dictionaries.py +++ /dev/null @@ -1,56 +0,0 @@ -import copy - -import pytest - -from fenicsxconcrete.finite_element_problem.base_material import MaterialProblem -from fenicsxconcrete.finite_element_problem.concrete_am import ConcreteAM -from fenicsxconcrete.finite_element_problem.concrete_thermo_mechanical import ConcreteThermoMechanical -from fenicsxconcrete.finite_element_problem.linear_elasticity import LinearElasticity -from fenicsxconcrete.util import ureg - - -# @pytest.mark.parametrize("material_model", [LinearElasticity, ConcreteAM, ConcreteThermoMechanical]) -# def test_default_dictionaries(material_model: MaterialProblem) -> None: -# """This function creates experimental setups with the respective default dictionaries -# -# This makes sure all relevant values are included""" -# -# default_setup, default_parameters = material_model.default_parameters() -# 
-# fem_problem = material_model(default_setup, default_parameters) -# fem_problem.solve() -# -# # test that each parameter is truly required -# # a loop over all default parameters removes each on in turn and expects a key error from the initialized problem -# for key in default_parameters: -# with pytest.raises(KeyError) as ex: -# less_parameters = copy.deepcopy(default_parameters) -# less_parameters.pop(key) -# fem_problem = material_model(default_setup, less_parameters) -# fem_problem.solve() -# print(key, "seems to be an unneccessary key in the default dictionary") -# print(ex) -# since the default parameter are used in each experiment or material problem this test makes no sence anymore -@pytest.mark.parametrize("material_model", [LinearElasticity, ConcreteAM, ConcreteThermoMechanical]) -def test_dimensionality_check(material_model: MaterialProblem) -> None: - - default_setup, default_parameters = material_model.default_parameters() - - with pytest.raises(ValueError): - default_parameters["g"] = 3 * ureg("m") # gravity should be m/s² - fem_problem = material_model(default_setup, default_parameters) - - -@pytest.mark.parametrize("material_model", [LinearElasticity, ConcreteAM, ConcreteThermoMechanical]) -def test_default_parameters(material_model: MaterialProblem) -> None: - """This function tests if the default_parameters are complete""" - # default_material = LinearElasticity - - default_setup, default_parameters = material_model.default_parameters() - - try: - fem_problem = material_model(default_setup, default_parameters) - fem_problem.solve() - except KeyError: - print("default parameter dictionary is wrong") - raise ValueError diff --git a/tests/finite_element_problem/test_hydration_function.py b/tests/finite_element_problem/test_hydration_function.py deleted file mode 100644 index 25d2bbc..0000000 --- a/tests/finite_element_problem/test_hydration_function.py +++ /dev/null @@ -1,36 +0,0 @@ -import numpy as np -import pytest - -from fenicsxconcrete.finite_element_problem import ConcreteThermoMechanical -from fenicsxconcrete.util import ureg - - -def test_hydration_function(): - - T = ureg.Quantity(35.0, ureg.degC).to_base_units().magnitude - dt = 60 * 30 - time_list = [40000] - parameter = {} - parameter["B1"] = 2.916e-4 - parameter["B2"] = 0.0024229 - parameter["eta"] = 5.554 - parameter["alpha_max"] = 0.875 - parameter["E_act"] = 47002 - parameter["T_ref"] = ureg.Quantity(25.0, ureg.degC).to_base_units().magnitude - parameter["Q_pot"] = 500e3 - - # initiate material problem - experiment, parameters = ConcreteThermoMechanical.default_parameters() - material_problem = ConcreteThermoMechanical(experiment=experiment, parameters=parameters) - # get the respective function - hydration_fkt = material_problem.get_heat_of_hydration_ftk() - - heat_list, doh_list = hydration_fkt(T, time_list, dt, parameter) - # print(heat_list) - # exit() - assert heat_list == pytest.approx(np.array([239.06484735])) - assert doh_list == pytest.approx(np.array([0.47812969])) - # assert heat_list == pytest.approx(np.array([169.36164423])) - # assert doh_list == pytest.approx(np.array([0.33872329])) - # problem.experiment.apply_displ_load(displacement) - # problem.solve() # solving this diff --git a/tests/finite_element_problem/test_linear_cantilever_beam.py b/tests/finite_element_problem/test_linear_cantilever_beam.py deleted file mode 100644 index 8beae26..0000000 --- a/tests/finite_element_problem/test_linear_cantilever_beam.py +++ /dev/null @@ -1,100 +0,0 @@ -import os -from pathlib import Path - -import 
numpy as np -import pytest - -from fenicsxconcrete.experimental_setup.cantilever_beam import CantileverBeam -from fenicsxconcrete.finite_element_problem.linear_elasticity import LinearElasticity -from fenicsxconcrete.sensor_definition.displacement_sensor import DisplacementSensor -from fenicsxconcrete.util import ureg - - -@pytest.mark.parametrize( - "dimension,results", - [ - [2, [-1.10366991e-06, -6.02823499e-06]], - [3, [-1.18487757e-06, 3.58357285e-10, -6.42126235e-06]], - ], -) -def test_linear_cantilever_beam(dimension: int, results: list[float]) -> None: - # setup paths and directories - data_dir = "data_files" - data_path = Path(__file__).parent / data_dir - - # define file name and path for paraview output - file_name = f"test_linear_cantilever_beam_{dimension}d" - files = [data_path / (file_name + ".xdmf"), data_path / (file_name + ".h5")] - # delete file if it exisits (only relevant for local tests) - for file in files: - if file.is_file(): - os.remove(file) - - setup_parameters = {} - setup_parameters["length"] = 1 * ureg("m") - setup_parameters["height"] = 0.3 * ureg("m") - setup_parameters["width"] = 0.3 * ureg("m") # only relevant for 3D case - setup_parameters["dim"] = dimension * ureg("") - setup_parameters["num_elements_length"] = 10 * ureg("") - setup_parameters["num_elements_height"] = 3 * ureg("") - setup_parameters["num_elements_width"] = 3 * ureg("") # only relevant for 3D case - - fem_parameters = {} - fem_parameters["rho"] = 7750 * ureg("kg/m^3") - fem_parameters["E"] = 210e9 * ureg("N/m^2") - fem_parameters["nu"] = 0.28 * ureg("") - - # Defining sensor positions - # TODO: why do I need the third coordinate for a 2D problem?!? - sensor_location = [setup_parameters["length"].magnitude, 0.0, 0.0] - sensor = DisplacementSensor(sensor_location) - - # setting up the problem - experiment = CantileverBeam(setup_parameters) # Specifies the domain, discretises it and apply Dirichlet BCs - - problem = LinearElasticity(experiment, fem_parameters, pv_name=file_name, pv_path=data_path) - problem.add_sensor(sensor) - - # solving and plotting - problem.solve() - problem.pv_plot() - - # check if files are created - for file in files: - assert file.is_file() - - # check sensor output - displacement_data = problem.sensors["DisplacementSensor"].get_last_entry() - assert displacement_data.magnitude == pytest.approx(results) - - # Second test - # test linearity of material problem - increase = 3 - fem_parameters["E"] = fem_parameters["E"] * increase - problem2 = LinearElasticity(experiment, fem_parameters) - problem2.add_sensor(sensor) - problem2.solve() - displacement_data2 = problem2.sensors["DisplacementSensor"].get_last_entry() - - assert displacement_data2.magnitude * increase == pytest.approx(displacement_data.magnitude) - - # Third test - # solving several time steps - same result for each time step since no time dependent loads applied - time_end = 10.0 * ureg("s") - fem_parameters["dt"] = 2.0 * ureg("s") - problem3 = LinearElasticity(experiment, fem_parameters, pv_name=f"{file_name}_time", pv_path=data_path) - sensor1 = DisplacementSensor(sensor_location) # re-initialization - problem3.add_sensor(sensor1) - while problem3.time < time_end.to_base_units().magnitude: - problem3.solve() - problem3.pv_plot() - - displacement_data3 = problem3.sensors["DisplacementSensor"].get_last_entry() - - # check displacement output over time - assert displacement_data3.magnitude * increase == pytest.approx(displacement_data.magnitude) - assert 
np.diff(np.array(problem3.sensors["DisplacementSensor"].data)[:, 0]) == pytest.approx(0.0) - assert np.diff(np.array(problem3.sensors["DisplacementSensor"].data)[:, 1]) == pytest.approx(0.0) - # check length of output in time and time step - assert len(problem3.sensors["DisplacementSensor"].time) == pytest.approx(int(problem3.time / problem3.p["dt"])) - assert np.mean(np.diff(problem3.sensors["DisplacementSensor"].time)) == pytest.approx(problem3.p["dt"]) diff --git a/tests/finite_element_problem/test_linear_cylinder.py b/tests/finite_element_problem/test_linear_cylinder.py deleted file mode 100644 index 4835015..0000000 --- a/tests/finite_element_problem/test_linear_cylinder.py +++ /dev/null @@ -1,79 +0,0 @@ -import numpy as np -import pint -import pytest - -from fenicsxconcrete.experimental_setup.compression_cylinder import CompressionCylinder -from fenicsxconcrete.finite_element_problem.linear_elasticity import LinearElasticity -from fenicsxconcrete.sensor_definition.reaction_force_sensor import ReactionForceSensor -from fenicsxconcrete.util import Parameters, ureg - - -def simple_setup( - p: Parameters, displacement: float, bc_setting: pint.Quantity -) -> tuple[float, dict[str, pint.Quantity]]: - parameters = {} - parameters["log_level"] = "WARNING" * ureg("") - parameters["bc_setting"] = bc_setting - parameters["mesh_density"] = 10 * ureg("") - parameters["E"] = 1023 * ureg("MPa") - parameters["nu"] = 0.0 * ureg("") - parameters["radius"] = 0.006 * ureg("m") - parameters["height"] = 0.012 * ureg("m") - parameters["dim"] = 3 * ureg("") - parameters["bc_setting"] = bc_setting * ureg("") - parameters["element_order"] = 2 * ureg("") - parameters["degree"] = 2 * ureg("") - - parameters.update(p) - - experiment = CompressionCylinder(parameters) - problem = LinearElasticity(experiment, parameters) - sensor = ReactionForceSensor() - problem.add_sensor(sensor) - problem.experiment.apply_displ_load(displacement) - - problem.solve() # solving this - - # last measurement, parameter dict - return problem.sensors[sensor.name].get_last_entry().magnitude[-1], problem.parameters - - -@pytest.mark.parametrize("dim", [2, 3]) -@pytest.mark.parametrize("degree", [1, 2]) -@pytest.mark.parametrize("bc_setting", ["fixed", "free"]) -def test_force_response(bc_setting: int, degree: int, dim: str) -> None: - p = {} - p["dim"] = dim * ureg("") - p["bc_setting"] = bc_setting * ureg("") - p["degree"] = degree * ureg("") - displacement = -0.003 * ureg("m") - - measured, fem_p = simple_setup(p, displacement, p["bc_setting"]) - - result = None - if dim == 2: - result = fem_p["E"] * fem_p["radius"] * 2 * displacement / fem_p["height"] - elif dim == 3: - result = fem_p["E"] * np.pi * fem_p["radius"] ** 2 * displacement / fem_p["height"] - - assert measured == pytest.approx(result.magnitude, 0.05) - - -@pytest.mark.parametrize("bc_setting", ["fixed", "free"]) -def test_errors_dimensions(bc_setting: str) -> None: - p = {} - displacement = -0.003 * ureg("m") - p["bc_setting"] = bc_setting * ureg("") - p["dim"] = 4 * ureg("") - - with pytest.raises(ValueError): - measured, fem_p = simple_setup(p, displacement, p["bc_setting"]) - - -def test_errors_bc_setting() -> None: - p = {} - displacement = -0.003 * ureg("m") - p["bc_setting"] = "wrong" * ureg("") - - with pytest.raises(ValueError): - measured, fem_p = simple_setup(p, displacement, p["bc_setting"]) diff --git a/tests/finite_element_problem/test_linear_simple_beam.py b/tests/finite_element_problem/test_linear_simple_beam.py deleted file mode 100644 index 
053f4c6..0000000 --- a/tests/finite_element_problem/test_linear_simple_beam.py +++ /dev/null @@ -1,66 +0,0 @@ -import os -from pathlib import Path - -import pytest - -from fenicsxconcrete.experimental_setup.simple_beam import SimpleBeam -from fenicsxconcrete.finite_element_problem.linear_elasticity import LinearElasticity -from fenicsxconcrete.sensor_definition.displacement_sensor import DisplacementSensor -from fenicsxconcrete.util import Parameters, ureg - - -@pytest.mark.parametrize( - "dimension, results", - [ - [2, [0.00105057, -0.01310806]], - [3, [1.13946512e-03, 1.42368783e-05, -1.42250453e-02]], - ], -) -def test_linear_simple_beam(dimension: int, results: list[float]) -> None: - # setup paths and directories - data_dir = "data_files" - data_path = Path(__file__).parent / data_dir - - # define file name and path for paraview output - file_name = f"test_linear_simple_beam_{dimension}d" - files = [data_path / (file_name + ".xdmf"), data_path / (file_name + ".h5")] - # delete file if it exists (only relevant for local tests) - for file in files: - if file.is_file(): - os.remove(file) - - setup_parameters = {} - setup_parameters["length"] = 10 * ureg("m") - setup_parameters["height"] = 0.5 * ureg("m") - setup_parameters["width"] = 0.3 * ureg("m") # only relevant for 3D case - setup_parameters["dim"] = dimension * ureg("") - setup_parameters["num_elements_length"] = 10 * ureg("") - setup_parameters["num_elements_height"] = 3 * ureg("") - setup_parameters["num_elements_width"] = 3 * ureg("") # only relevant for 3D case - setup_parameters["load"] = 200 * ureg("kN/m^2") - - fem_parameters = {} - fem_parameters["rho"] = 7750 * ureg("kg/m^3") - fem_parameters["E"] = 210e9 * ureg("N/m^2") - fem_parameters["nu"] = 0.28 * ureg("") - - # Defining sensor positions - sensor_location = [setup_parameters["length"].magnitude / 2, 0.0, 0.0] - sensor = DisplacementSensor(sensor_location) - - # setting up the problem - experiment = SimpleBeam(setup_parameters) # Specifies the domain, discretises it and apply Dirichlet BCs - problem = LinearElasticity(experiment, fem_parameters, pv_name=file_name, pv_path=data_path) - problem.add_sensor(sensor) - - # solving and plotting - problem.solve() - problem.pv_plot() - - # check if files are created - for file in files: - assert file.is_file() - - # check sensor output - displacement_data = problem.sensors["DisplacementSensor"].get_last_entry() - assert displacement_data.magnitude == pytest.approx(results, 1e-5) diff --git a/tests/finite_element_problem/test_linear_simple_cube.py b/tests/finite_element_problem/test_linear_simple_cube.py deleted file mode 100644 index 5a8ea2a..0000000 --- a/tests/finite_element_problem/test_linear_simple_cube.py +++ /dev/null @@ -1,157 +0,0 @@ -import os -from pathlib import Path - -import numpy as np -import pytest - -from fenicsxconcrete.experimental_setup.simple_cube import SimpleCube -from fenicsxconcrete.finite_element_problem.linear_elasticity import LinearElasticity -from fenicsxconcrete.sensor_definition.displacement_sensor import DisplacementSensor -from fenicsxconcrete.sensor_definition.strain_sensor import StrainSensor -from fenicsxconcrete.sensor_definition.stress_sensor import StressSensor -from fenicsxconcrete.util import ureg - - -@pytest.mark.parametrize("dim", [2, 3]) -def test_disp(dim: int) -> None: - """uniaxial tension test for different dimensions (dim)""" - - # setup paths and directories - data_dir = "data_files" - data_path = Path(__file__).parent / data_dir - - # define file name and path for paraview 
output - file_name = f"test_linear_uniaxial_{dim}d" - files = [data_path / (file_name + ".xdmf"), data_path / (file_name + ".h5")] - # delete file if it exisits (only relevant for local tests) - for file in files: - if file.is_file(): - os.remove(file) - - # defining experiment parameters - parameters = {} - - parameters["dim"] = dim * ureg("") - parameters["num_elements_length"] = 2 * ureg("") - parameters["num_elements_height"] = 2 * ureg("") - parameters["num_elements_width"] = 2 * ureg("") - - displacement = 0.01 * ureg("m") - - parameters["rho"] = 7750 * ureg("kg/m^3") - parameters["E"] = 210e9 * ureg("N/m^2") - parameters["nu"] = 0.28 * ureg("") - parameters["strain_state"] = "uniaxial" * ureg("") - - if dim == 2: - # change default stress_state - parameters["stress_state"] = "plane_stress" * ureg("") - - # setting up the problem - experiment = SimpleCube(parameters) - problem = LinearElasticity(experiment, parameters, pv_name=file_name, pv_path=data_path) - - if dim == 2: - sensor_location = [0.5, 0.5, 0.0] - elif dim == 3: - sensor_location = [0.5, 0.5, 0.5] - - # add sensors - problem.add_sensor(StressSensor(sensor_location)) - problem.add_sensor(StrainSensor(sensor_location)) - - # apply displacement load and solve - problem.experiment.apply_displ_load(displacement) - problem.solve() - problem.pv_plot() - - # checks - analytic_eps = (displacement.to_base_units() / (1.0 * ureg("m"))).magnitude - - strain_result = problem.sensors["StrainSensor"].get_last_entry().magnitude - stress_result = problem.sensors["StressSensor"].get_last_entry().magnitude - if dim == 2: - # strain in yy direction - assert strain_result[-1] == pytest.approx(analytic_eps) - # strain in xx direction - assert strain_result[0] == pytest.approx(-problem.parameters["nu"].magnitude * analytic_eps) - # strain in xy and yx direction - assert strain_result[1] == pytest.approx(strain_result[2]) - assert strain_result[1] == pytest.approx(0.0) - # stress in yy direction - assert stress_result[-1] == pytest.approx((analytic_eps * problem.parameters["E"]).magnitude) - - elif dim == 3: - # strain in zz direction - assert strain_result[-1] == pytest.approx(analytic_eps) - # strain in yy direction - assert strain_result[4] == pytest.approx(-problem.parameters["nu"].magnitude * analytic_eps) - # strain in xx direction - assert strain_result[0] == pytest.approx(-problem.parameters["nu"].magnitude * analytic_eps) - # shear strains - sum_mixed_strains = ( - strain_result[1] # xy - - strain_result[3] # yx - - strain_result[2] # xz - - strain_result[6] # zx - - strain_result[5] # yz - - strain_result[7] # zy - ) - assert sum_mixed_strains == pytest.approx(0.0) - - # stress in zz direction - assert stress_result[-1] == pytest.approx((analytic_eps * problem.parameters["E"].magnitude)) - - -@pytest.mark.parametrize("dim", [2, 3]) -def test_strain_state_error(dim: int) -> None: - # setup_parameters = SimpleCube.default_parameters() - setup_parameters = {} # use default parameters - setup_parameters["dim"] = dim * ureg("") - setup_parameters["strain_state"] = "wrong" * ureg("") - setup = SimpleCube(setup_parameters) - default_setup, fem_parameters = LinearElasticity.default_parameters() - with pytest.raises(ValueError): - fem_problem = LinearElasticity(setup, fem_parameters) - - -@pytest.mark.parametrize("dim", [2, 3]) -@pytest.mark.parametrize("degree", [1, 2]) -def test_multiaxial_strain(dim: int, degree: int) -> None: - # setup_parameters = SimpleCube.default_parameters() - setup_parameters = {} # use default parameters - 
setup_parameters["dim"] = dim * ureg("") - setup_parameters["degree"] = degree * ureg("") - setup_parameters["strain_state"] = "multiaxial" * ureg("") - setup = SimpleCube(setup_parameters) - default_setup, fem_parameters = LinearElasticity.default_parameters() - fem_problem = LinearElasticity(setup, fem_parameters) - - displ = -0.01 - fem_problem.experiment.apply_displ_load(displ * ureg("m")) - - if dim == 2: - target = np.array([displ, displ]) - sensor_location_corner = [fem_problem.p["length"], fem_problem.p["height"], 0.0] - sensor_location_center = [fem_problem.p["length"] / 2, fem_problem.p["height"] / 2, 0.0] - elif dim == 3: - target = np.array([displ, displ, displ]) - sensor_location_corner = [fem_problem.p["length"], fem_problem.p["width"], fem_problem.p["height"]] - sensor_location_center = [ - fem_problem.p["length"] / 2, - fem_problem.p["height"] / 2, - fem_problem.p["height"] / 2, - ] - - sensor_corner = DisplacementSensor(where=sensor_location_corner, name="displacement_corner") - sensor_center = DisplacementSensor(where=sensor_location_center, name="displacement_center") - - fem_problem.add_sensor(sensor_corner) - fem_problem.add_sensor(sensor_center) - - fem_problem.solve() - result_corner = fem_problem.sensors.displacement_corner.get_last_entry().magnitude - result_center = fem_problem.sensors.displacement_center.get_last_entry().magnitude - - assert result_corner == pytest.approx(target) - assert result_center == pytest.approx(target / 2) diff --git a/tests/finite_element_problem/test_linear_tensile_beam.py b/tests/finite_element_problem/test_linear_tensile_beam.py deleted file mode 100644 index a49e0ad..0000000 --- a/tests/finite_element_problem/test_linear_tensile_beam.py +++ /dev/null @@ -1,66 +0,0 @@ -import os -from pathlib import Path - -import pytest - -from fenicsxconcrete.experimental_setup.tensile_beam import TensileBeam -from fenicsxconcrete.finite_element_problem.linear_elasticity import LinearElasticity -from fenicsxconcrete.sensor_definition.displacement_sensor import DisplacementSensor -from fenicsxconcrete.util import Parameters, ureg - - -@pytest.mark.parametrize( - "dimension,results", - [ - [2, [8.71973440e-06, 5.12000403e-07]], - [3, [9.44327797e-06, 4.00000088e-07, 4.00000088e-07]], - ], -) -def test_linear_tensile_beam(dimension: int, results: list[float]) -> None: - # setup paths and directories - data_dir = "data_files" - data_path = Path(__file__).parent / data_dir - - # define file name and path for paraview output - file_name = f"test_linear_tensile_beam_{dimension}d" - files = [data_path / (file_name + ".xdmf"), data_path / (file_name + ".h5")] - # delete file if it exisits (only relevant for local tests) - for file in files: - if file.is_file(): - os.remove(file) - - setup_parameters = {} - setup_parameters["length"] = 1 * ureg("m") - setup_parameters["height"] = 0.3 * ureg("m") - setup_parameters["width"] = 0.3 * ureg("m") # only relevant for 3D case - setup_parameters["dim"] = dimension * ureg("") - setup_parameters["num_elements_length"] = 10 * ureg("") - setup_parameters["num_elements_height"] = 3 * ureg("") - setup_parameters["num_elements_width"] = 3 * ureg("") # only relevant for 3D case - setup_parameters["load"] = 2000 * ureg("kN") - - fem_parameters = {} - fem_parameters["rho"] = 7750 * ureg("kg/m^3") - fem_parameters["E"] = 210e9 * ureg("N/m^2") - fem_parameters["nu"] = 0.28 * ureg("") - - # Defining sensor positions - sensor_location = [setup_parameters["length"].magnitude, 0.0, 0.0] - sensor = DisplacementSensor(sensor_location) 
- - # setting up the problem - experiment = TensileBeam(setup_parameters) # Specifies the domain, discretises it and apply Dirichlet BCs - problem = LinearElasticity(experiment, fem_parameters, pv_name=file_name, pv_path=data_path) - problem.add_sensor(sensor) - - # solving and plotting - problem.solve() - problem.pv_plot() - - # check if files are created - for file in files: - assert file.is_file() - - # check sensor output - displacement_data = problem.sensors["DisplacementSensor"].get_last_entry() - assert displacement_data.magnitude == pytest.approx(results) diff --git a/tests/finite_element_problem/test_thermo_mechanical_cube.py b/tests/finite_element_problem/test_thermo_mechanical_cube.py deleted file mode 100644 index cb1cefa..0000000 --- a/tests/finite_element_problem/test_thermo_mechanical_cube.py +++ /dev/null @@ -1,240 +0,0 @@ -import os -from pathlib import Path - -import dolfinx as df -import numpy as np -import pytest -from mpi4py import MPI - -from fenicsxconcrete.boundary_conditions import BoundaryConditions -from fenicsxconcrete.experimental_setup import SimpleCube -from fenicsxconcrete.finite_element_problem import ConcreteThermoMechanical, LinearElasticity -from fenicsxconcrete.sensor_definition.displacement_sensor import DisplacementSensor -from fenicsxconcrete.sensor_definition.doh_sensor import DOHSensor -from fenicsxconcrete.sensor_definition.strain_sensor import StrainSensor -from fenicsxconcrete.sensor_definition.stress_sensor import StressSensor -from fenicsxconcrete.sensor_definition.temperature_sensor import TemperatureSensor -from fenicsxconcrete.sensor_definition.youngs_modulus_sensor import YoungsModulusSensor -from fenicsxconcrete.util import ureg - - -@pytest.mark.parametrize("dim", [2, 3]) -def test_mechanical_only(dim: int) -> None: - # defining experiment parameters - parameters_exp = {} - parameters_exp["dim"] = dim * ureg("") - parameters_exp["num_elements_length"] = 2 * ureg("") - parameters_exp["num_elements_height"] = 2 * ureg("") - parameters_exp["num_elements_width"] = 2 * ureg("") - parameters_exp["strain_state"] = "uniaxial" * ureg("") - - displacement = 0.01 * ureg("m") - - parameters = {} - parameters["rho"] = 7750 * ureg("kg/m^3") - parameters["E"] = 210e9 * ureg("N/m^2") - parameters["nu"] = 0.28 * ureg("") - - experiment = SimpleCube(parameters_exp) - - _, parameters_thermo = ConcreteThermoMechanical.default_parameters() - parameters_thermo["nu"] = parameters["nu"].copy() - parameters_thermo["E_28"] = parameters["E"].copy() - parameters_thermo["q_degree"] = 4 * ureg("") - - problem_thermo_mechanical = ConcreteThermoMechanical( - experiment, parameters_thermo, pv_name=f"thermo_mechanical_test_{dim}", pv_path="" - ) - - # apply displacement load and solve - experiment.apply_displ_load(displacement) - experiment.apply_body_force() - - # problem_thermo_mechanical.experiment.apply_displ_load(displacement) - problem_thermo_mechanical.temperature_problem.q_alpha.vector.array[:] = parameters_thermo["alpha_max"].magnitude - - problem_thermo_mechanical.mechanics_solver.solve(problem_thermo_mechanical.fields.displacement) - - problem_thermo_mechanical.pv_plot() - - # set E for the elastic problem - parameters["E"] = problem_thermo_mechanical.q_fields.youngs_modulus.vector.array[:][0] * ureg("N/m^2") - - problem_elastic = LinearElasticity(experiment, parameters, pv_name=f"pure_mechanical_test_{dim}", pv_path="") - - problem_elastic.solve() - problem_elastic.pv_plot() - - assert problem_thermo_mechanical.q_fields.youngs_modulus.vector.array[:] == 
pytest.approx( - parameters["E"].magnitude - ) - - np.testing.assert_allclose( - problem_thermo_mechanical.fields.displacement.vector.array, - problem_elastic.fields.displacement.vector.array, - rtol=1e-4, - ) - - -class LegacyMinimalCube(SimpleCube): - def setup(self) -> None: - """Generates the mesh in 2D or 3D based on parameters - - Raises: - ValueError: if dimension (self.p["dim"]) is not 2 or 3 - """ - - self.logger.debug("setup mesh for %s", self.p["dim"]) - - if self.p["dim"] == 2: - # build a rectangular mesh - self.mesh = df.mesh.create_rectangle( - MPI.COMM_WORLD, - [ - [0.0, 0.0], - [self.p["length"], self.p["height"]], - ], - [self.p["num_elements_length"], self.p["num_elements_height"]], - cell_type=df.mesh.CellType.triangle, - ) - elif self.p["dim"] == 3: - self.mesh = df.mesh.create_box( - MPI.COMM_WORLD, - [ - [0.0, 0.0, 0.0], - [self.p["length"], self.p["width"], self.p["height"]], - ], - [self.p["num_elements_length"], self.p["num_elements_width"], self.p["num_elements_height"]], - cell_type=df.mesh.CellType.tetrahedron, - ) - - else: - raise ValueError(f"wrong dimension {self.p['dim']} for problem setup") - - # initialize variable top_displacement - self.top_displacement = df.fem.Constant(domain=self.mesh, c=0.0) # applied via fkt: apply_displ_load(...) - self.use_body_force = False - self.temperature_bc = df.fem.Constant(domain=self.mesh, c=self.p["T_bc"]) - - def create_displacement_boundary(self, V: df.fem.FunctionSpace) -> list[df.fem.bcs.DirichletBCMetaClass]: - bc_generator = BoundaryConditions(self.mesh, V) - - bc_generator.add_dirichlet_bc( - df.fem.Constant(self.mesh, np.zeros(self.p["dim"])), - boundary=self.boundary_bottom(), - method="geometrical", - entity_dim=self.mesh.topology.dim - 1, - ) - - return bc_generator.bcs - - -@pytest.mark.parametrize("dim", [3]) -def test_hydration_with_body_forces(dim: int): - # This test relies on data from the old repository - - parameters = {} # using the current default values - # general - # parameters["log_level"] = "WARNING" * ureg("") - # mesh - # parameters["mesh_setting"] = "left/right" * ureg("") # default boundary setting - parameters["dim"] = dim * ureg("") - # parameters["mesh_density"] = 2 * ureg("") - parameters["length"] = 1.0 * ureg("m") - parameters["width"] = 1.0 * ureg("m") - parameters["height"] = 1.0 * ureg("m") - - # Differs from the old repository - parameters["num_elements_length"] = 2 * ureg("") - parameters["num_elements_width"] = 2 * ureg("") - parameters["num_elements_height"] = 2 * ureg("") - - # temperature boundary - # parameters["bc_setting"] = "full" * ureg("") - parameters["T_0"] = ureg.Quantity(20.0, ureg.degC) # inital concrete temperature - parameters["T_bc1"] = ureg.Quantity(20.0, ureg.degC) # temperature boundary value 1 - - # Differs from the old repository - parameters["rho"] = 2350 * ureg("kg/m^3") # in kg/m^3 density of concrete - - parameters["density_binder"] = 1440 * ureg("kg/m^3") # in kg/m^3 density of the binder - parameters["thermal_cond"] = 2.0 * ureg("W/(m*K)") # effective thermal conductivity, approx in W(mK)^-1, concrete! 
- # self.specific_heat_capacity = 9000 # effective specific heat capacity in J kg⁻1 K⁻1 - parameters["vol_heat_cap"] = 2.4e6 * ureg("J/(m^3 * K)") # volumetric heat cap J/(m3 K) - # parameters["b_ratio"] = 0.2 # volume percentage of binder - parameters["Q_pot"] = 500e3 * ureg("J/kg") # potential heat per weight of binder in J/kg - parameters["Q_inf"] = 144000000 * ureg("J/m^3") - # p['Q_inf'] = self.Q_pot * self.density_binder * self.b_ratio # potential heat per concrete volume in J/m3 - parameters["B1"] = 2.916e-4 * ureg("1/s") # in 1/s - parameters["B2"] = 0.0024229 * ureg("1/s") # - - parameters["eta"] = 5.554 * ureg("") # something about diffusion - parameters["alpha_max"] = 0.87 * ureg("") # also possible to approximate based on equation with w/c - parameters["alpha_tx"] = 0.68 * ureg("") # also possible to approximate based on equation with w/c - parameters["E_act"] = 5653 * 8.3145 * ureg("J*mol^-1") # activation energy in Jmol^-1 - parameters["T_ref"] = ureg.Quantity(25.0, ureg.degC) # reference temperature in degree Celsius - parameters["T_0"] = ureg.Quantity(20.0, ureg.degC) # initial concrete temperature in degree Celsius - # setting for temperature adjustment - parameters["temp_adjust_law"] = "exponential" * ureg("") - # polynomial degree - parameters["degree"] = 2 * ureg("") # default boundary setting - parameters["q_degree"] = 2 * ureg("") - ### parameters for the mechanics problem - parameters["E_28"] = 42000000.0 * ureg("N/m^2") # Young's modulus in N/m^2 - parameters["nu"] = 0.2 * ureg("") # Poisson's ratio - # required parameters for alpha to E mapping - parameters["alpha_t"] = 0.2 * ureg("") - parameters["alpha_0"] = 0.05 * ureg("") - parameters["a_E"] = 0.6 * ureg("") - # required parameters for alpha to tensile and compressive stiffness mapping - parameters["fc_inf"] = 6210000 * ureg("") - parameters["a_fc"] = 1.2 * ureg("") - parameters["ft_inf"] = 467000 * ureg("") - parameters["a_ft"] = 1.0 * ureg("") - parameters["igc"] = 8.3145 * ureg("J/K/mol") - parameters["evolution_ft"] = "False" * ureg("") - parameters["dt"] = 60.0 * ureg("min") - - experiment = LegacyMinimalCube(parameters) - experiment.apply_body_force() - problem = ConcreteThermoMechanical( - experiment=experiment, parameters=parameters, pv_name=f"thermo_mechanical_stuff_{dim}" - ) - # problem = fenics_concrete.ConcreteThermoMechanical(experiment=experiment, parameters=parameters, vmapoutput=False) - - doh_sensor = DOHSensor([0.25, 0.25, 0.25], name="doh") - E_sensor = YoungsModulusSensor([0.25, 0.25, 0.25], name="E") - T_sensor = TemperatureSensor([0.25, 0.25, 0.25], name="T") - - problem.add_sensor(doh_sensor) - problem.add_sensor(E_sensor) - problem.add_sensor(T_sensor) - # initialize time - t = problem.p["dt"] # first time step time - problem.time = t - t_list = [] - u_list = [] - temperature_list = [] - doh = 0 - print(problem.p) - while doh < parameters["alpha_tx"]: # time - # solve temp-hydration-mechanics - t_list.append(problem.time) - problem.solve() # solving this - doh = doh_sensor.data[-1] - # u_list.append(problem.fields.displacement.vector.array[:]) - # temperature_list.append(problem.fields.temperature.vector.array[:]) - problem.pv_plot() - - data = np.load(Path(__file__).parent / "fenics_concrete_thermo_mechanical.npz") - - # find dofs of point [0.25, 0.25, 0.25] in legacy data for comparison - T_dofs = np.argwhere(np.sum(np.abs(data["dof_map_t"] - np.array([0.25, 0.25, 0.25])), axis=1) < 1e-4) - - if T_dofs.size > 0: - T_dof = T_dofs[0] - T_list = data["T"][:, T_dof] - 
np.testing.assert_allclose(np.array(T_sensor.data).flatten(), T_list.flatten(), rtol=1e-4) - - np.testing.assert_allclose(data["t"], t_list) - np.testing.assert_allclose(data["doh"].flatten(), np.array(doh_sensor.data).flatten(), rtol=1e-4) - np.testing.assert_allclose(data["E"].flatten(), np.array(E_sensor.data).flatten(), rtol=1e-4) diff --git a/tests/finite_element_problem/test_thixotropy_uniaxial.py b/tests/finite_element_problem/test_thixotropy_uniaxial.py deleted file mode 100644 index 532c60e..0000000 --- a/tests/finite_element_problem/test_thixotropy_uniaxial.py +++ /dev/null @@ -1,202 +0,0 @@ -import os -from pathlib import Path - -import pint -import pytest - -from fenicsxconcrete.experimental_setup import SimpleCube -from fenicsxconcrete.finite_element_problem import ConcreteAM, ConcreteThixElasticModel -from fenicsxconcrete.sensor_definition.strain_sensor import StrainSensor -from fenicsxconcrete.sensor_definition.stress_sensor import StressSensor -from fenicsxconcrete.util import ureg - - -def disp_over_time(current_time: pint.Quantity, switch_time: pint.Quantity) -> pint.Quantity: - """linear ramp of displacement bc over time - - Args: - t: current time - - Returns: displacement value for given time - - """ - if current_time <= switch_time: - current_disp = 0.1 * ureg("m") / (switch_time) * current_time - else: - current_disp = 0.1 * ureg("m") - - return current_disp - - -@pytest.mark.parametrize("dim", [2, 3]) -@pytest.mark.parametrize("degree", [1, 2]) -def test_disp(dim: int, degree: int): - """uniaxial test displacement controlled - - Args: - dim: dimension of the test (2 or 3) - degree: polynominal degree - """ - - # setup paths and directories - data_dir = "data_files" - data_path = Path(__file__).parent / data_dir - - # define file name and path for paraview output - file_name = f"test_thixotropy_uniaxial_{dim}d" - files = [data_path / (file_name + ".xdmf"), data_path / (file_name + ".h5")] - # delete file if it exists (only relevant for local tests) - for file in files: - if file.is_file(): - os.remove(file) - - # defining experiment parameters - parameters = {} - - parameters["dim"] = dim * ureg("") - parameters["num_elements_length"] = 2 * ureg("") - parameters["num_elements_height"] = 2 * ureg("") - parameters["num_elements_width"] = 2 * ureg("") - - if dim == 2: - parameters["stress_state"] = "plane_stress" * ureg("") - - # setting up the problem - experiment = SimpleCube(parameters) - - # get description of parameters - des = ConcreteAM.parameter_description() - print(des) - - # use default parameters (default) and change accordingly to cases - parameters["degree"] = degree * ureg("") - if dim == 3: - parameters["q_degree"] = 4 * ureg("") - - # defining time parameters - parameters["dt"] = 1 * 60 * ureg("s") - total_time = 6 * 60 * ureg("s") - - # displacement controlled uniaxial test with no body force - parameters["strain_state"] = "uniaxial" * ureg("") - displacement = disp_over_time - parameters["density"] = 0 * ureg("kg/m^3") # no body force!! 
- - problem = ConcreteAM( - experiment, parameters, nonlinear_problem=ConcreteThixElasticModel, pv_name=file_name, pv_path=data_path - ) - - # add sensors - if dim == 2: - problem.add_sensor(StressSensor([0.5, 0.5, 0.0])) - problem.add_sensor(StrainSensor([0.5, 0.5, 0.0])) - elif dim == 3: - problem.add_sensor(StressSensor([0.5, 0.5, 0.5])) - problem.add_sensor(StrainSensor([0.5, 0.5, 0.5])) - - E_o_time = [] - disp_o_time = [0.0] - while problem.time <= total_time.to_base_units().magnitude: - # apply increment displacements!!! - disp_o_time.append( - displacement((problem.time + problem.p["dt"]) * ureg("s"), 2 * parameters["dt"]).to_base_units() - ) - delta_disp = disp_o_time[-1] - disp_o_time[-2] - problem.experiment.apply_displ_load(delta_disp) - - problem.solve() - problem.pv_plot() - print("problem time", problem.time) - print("computed disp", problem.fields.displacement.x.array[:].max()) - - # store Young's modulus over time - E_o_time.append(problem.youngsmodulus.vector.array[:].max()) - - print("Stress sensor", problem.sensors["StressSensor"].data) - print("strain sensor", problem.sensors["StrainSensor"].data) - print("time", problem.sensors["StrainSensor"].time) - print("E modul", E_o_time) - - check_disp_case(problem, parameters["dt"], E_o_time) - - -def check_disp_case(problem: ConcreteAM, dt: pint.Quantity, E_o_time: list[float]) -> None: - """checks for displacement controlled version - - Args: - problem: concreteam problem instance - dt: time step parameter - E_o_time: Youngs modulus values over time - - """ - - disp_at_end = disp_over_time(problem.sensors["StrainSensor"].time[-1] * ureg("s"), 2 * dt).to_base_units() - analytic_eps = (disp_at_end / (1.0 * ureg("m"))).magnitude - disp_dt1 = disp_over_time(problem.sensors["StrainSensor"].time[0] * ureg("s"), 2 * dt).to_base_units() - analytic_eps_dt1 = (disp_dt1 / (1.0 * ureg("m"))).magnitude - print(analytic_eps, analytic_eps_dt1, disp_at_end, disp_dt1) - - if problem.p["dim"] == 2: - # standard uniaxial checks for last time step - print("analytic_eps", analytic_eps, problem.p["nu"] * analytic_eps) - # strain in yy direction - assert problem.sensors["StrainSensor"].data[-1][-1] == pytest.approx(analytic_eps) - # strain in xx direction - assert problem.sensors["StrainSensor"].data[-1][0] == pytest.approx(-problem.p["nu"] * analytic_eps) - # strain in xy and yx direction - assert problem.sensors["StrainSensor"].data[-1][1] == pytest.approx( - problem.sensors["StrainSensor"].data[-1][2] - ) - assert problem.sensors["StrainSensor"].data[-1][1] == pytest.approx(0.0) - - # thix related tests - # thix tests stress in yy first time step - assert problem.sensors["StressSensor"].data[0][-1] == pytest.approx((analytic_eps_dt1 * E_o_time[0])) - # stress delta between last time steps - assert problem.sensors["StressSensor"].data[-1][-1] - problem.sensors["StressSensor"].data[-2][ - -1 - ] == pytest.approx(0.0) - elif problem.p["dim"] == 3: - # standard uniaxial checks for last time step - # strain in zz direction - assert problem.sensors["StrainSensor"].data[-1][-1] == pytest.approx(analytic_eps) - # strain in yy direction - assert problem.sensors["StrainSensor"].data[-1][4] == pytest.approx(-problem.p["nu"] * analytic_eps) - # strain in xx direction - assert problem.sensors["StrainSensor"].data[-1][0] == pytest.approx(-problem.p["nu"] * analytic_eps) - # shear strains - sum_mixed_strains = ( - problem.sensors["StrainSensor"].data[-1][1] # xy - - problem.sensors["StrainSensor"].data[-1][3] # yx - - 
problem.sensors["StrainSensor"].data[-1][2] # xz - - problem.sensors["StrainSensor"].data[-1][6] # zx - - problem.sensors["StrainSensor"].data[-1][5] # yz - - problem.sensors["StrainSensor"].data[-1][7] # zy - ) - assert sum_mixed_strains == pytest.approx(0.0) - - # thix related tests - # thix tests stress in zz direction first time step - assert problem.sensors["StressSensor"].data[0][-1] == pytest.approx(analytic_eps_dt1 * E_o_time[0]) - # stress delta between last time steps - assert problem.sensors["StressSensor"].data[-1][-1] - problem.sensors["StressSensor"].data[-2][ - -1 - ] == pytest.approx(0.0) - - # check changing youngs modulus - if problem.time < problem.p["tf_E"]: - E_end = problem.p["E_0"] + problem.p["R_E"] * (problem.time + problem.p["age_0"]) - else: - E_end = ( - problem.p["E_0"] - + problem.p["R_E"] * problem.p["tf_E"] - + problem.p["A_E"] * (problem.time + problem.p["age_0"] - problem.p["tf_E"]) - ) - assert E_o_time[-1] == pytest.approx(E_end) - - -# if __name__ == "__main__": -# -# test_disp(2, 2) - -# test_disp(3, 1) diff --git a/tests/sensor_definition/test_point_sensors.py b/tests/sensor_definition/test_point_sensors.py deleted file mode 100644 index 57f36f3..0000000 --- a/tests/sensor_definition/test_point_sensors.py +++ /dev/null @@ -1,40 +0,0 @@ -import numpy as np -import pytest - -from fenicsxconcrete.finite_element_problem.linear_elasticity import LinearElasticity -from fenicsxconcrete.sensor_definition.displacement_sensor import DisplacementSensor -from fenicsxconcrete.sensor_definition.strain_sensor import StrainSensor -from fenicsxconcrete.sensor_definition.stress_sensor import StressSensor - - -@pytest.mark.parametrize("point_sensor", [DisplacementSensor, StressSensor, StrainSensor]) -def test_point_sensor(point_sensor) -> None: - default_setup, default_parameters = LinearElasticity.default_parameters() - - fem_problem = LinearElasticity(default_setup, default_parameters) - - # define sensors - sensor_location = [0.0, 0.0, 0.0] - sensor = point_sensor(sensor_location) - - fem_problem.add_sensor(sensor) - - # check that there is no stored data and test Error - with pytest.raises(RuntimeError): - fem_problem.sensors[sensor.name].get_last_entry() - - fem_problem.solve() - - # check that something is stored - data = fem_problem.sensors[sensor.name].get_last_entry() - assert data is not None - - # check that location metadata is reported correctly - # other metadata tested in test_sensors.py - metadata = sensor.report_metadata() - assert metadata["where"] == sensor_location - # check if location is not a list - sensor_location = np.array([0.0, 0.0, 0.0]) - sensor = point_sensor(sensor_location) - metadata = sensor.report_metadata() - assert metadata["where"] == pytest.approx(sensor_location) diff --git a/tests/sensor_definition/test_reaction_force_sensor.py b/tests/sensor_definition/test_reaction_force_sensor.py deleted file mode 100644 index 3351f6e..0000000 --- a/tests/sensor_definition/test_reaction_force_sensor.py +++ /dev/null @@ -1,149 +0,0 @@ -import pytest - -from fenicsxconcrete.experimental_setup.compression_cylinder import CompressionCylinder -from fenicsxconcrete.experimental_setup.simple_cube import SimpleCube -from fenicsxconcrete.finite_element_problem.linear_elasticity import LinearElasticity -from fenicsxconcrete.sensor_definition.reaction_force_sensor import ReactionForceSensor -from fenicsxconcrete.sensor_definition.stress_sensor import StressSensor -from fenicsxconcrete.util import ureg - - -def test_reaction_force_sensor() -> None: - 
default_setup, default_parameters = LinearElasticity.default_parameters() - setup = CompressionCylinder(CompressionCylinder.default_parameters()) - - fem_problem = LinearElasticity(setup, default_parameters) - - # define sensors - sensor1 = ReactionForceSensor() - fem_problem.add_sensor(sensor1) - sensor2 = ReactionForceSensor(surface={"function": "boundary_bottom", "args": {}}) - fem_problem.add_sensor(sensor2) - sensor3 = ReactionForceSensor(surface={"function": "boundary_top", "args": {}}, name="top_sensor") - fem_problem.add_sensor(sensor3) - - fem_problem.experiment.apply_displ_load(-0.001 * ureg("m")) - fem_problem.solve() - - # testing default value - assert ( - fem_problem.sensors.ReactionForceSensor.get_last_entry() - == fem_problem.sensors.ReactionForceSensor2.get_last_entry() - ).all() - - # testing top boundary value - assert fem_problem.sensors.ReactionForceSensor.get_last_entry().magnitude[-1] == pytest.approx( - -1 * fem_problem.sensors.top_sensor.get_last_entry().magnitude[-1] - ) - - -@pytest.mark.parametrize("dim", [2, 3]) -@pytest.mark.parametrize("degree", [1, 2]) -def test_full_boundary_reaction(dim: int, degree: int) -> None: - setup_parameters = SimpleCube.default_parameters() - setup_parameters["dim"] = dim * ureg("") - setup_parameters["degree"] = degree * ureg("") - setup_parameters["strain_state"] = "multiaxial" * ureg("") - cube = SimpleCube(setup_parameters) - default_setup, fem_parameters = LinearElasticity.default_parameters() - fem_parameters["nu"] = 0.2 * ureg("") - fem_problem = LinearElasticity(cube, fem_parameters) - - # define reactionforce sensors - sensor = ReactionForceSensor(surface={"function": "boundary_left", "args": {}}, name="ReactionForceSensorLeft") - fem_problem.add_sensor(sensor) - sensor = ReactionForceSensor(surface={"function": "boundary_right", "args": {}}, name="ReactionForceSensorRight") - fem_problem.add_sensor(sensor) - sensor = ReactionForceSensor(surface={"function": "boundary_top", "args": {}}, name="ReactionForceSensorTop") - fem_problem.add_sensor(sensor) - sensor = ReactionForceSensor(surface={"function": "boundary_bottom", "args": {}}, name="ReactionForceSensorBottom") - fem_problem.add_sensor(sensor) - if dim == 3: - sensor = ReactionForceSensor( - surface={"function": "boundary_front", "args": {}}, name="ReactionForceSensorFront" - ) - fem_problem.add_sensor(sensor) - sensor = ReactionForceSensor(surface={"function": "boundary_back", "args": {}}, name="ReactionForceSensorBack") - fem_problem.add_sensor(sensor) - - fem_problem.experiment.apply_displ_load(0.002 * ureg("m")) - fem_problem.solve() - - force_left = fem_problem.sensors.ReactionForceSensorLeft.get_last_entry().magnitude[0] - force_right = fem_problem.sensors.ReactionForceSensorRight.get_last_entry().magnitude[0] - force_top = fem_problem.sensors.ReactionForceSensorTop.get_last_entry().magnitude[-1] - force_bottom = fem_problem.sensors.ReactionForceSensorBottom.get_last_entry().magnitude[-1] - - # checking opposing forces left-right and top-bottom - assert force_left == pytest.approx(-1 * force_right) - assert force_top == pytest.approx(-1 * force_bottom) - # checking equal forces on sides - assert force_left == pytest.approx(force_bottom) - # checking report metadata - # TODO Figure out how to identify which boundary is applied - assert fem_problem.sensors.ReactionForceSensorLeft.report_metadata()["surface"] == { - "function": "boundary_left", - "args": {}, - } - assert fem_problem.sensors.ReactionForceSensorRight.report_metadata()["surface"] == { - "function": 
"boundary_right", - "args": {}, - } - assert fem_problem.sensors.ReactionForceSensorTop.report_metadata()["surface"] == { - "function": "boundary_top", - "args": {}, - } - assert fem_problem.sensors.ReactionForceSensorBottom.report_metadata()["surface"] == { - "function": "boundary_bottom", - "args": {}, - } - - if dim == 3: - force_front = fem_problem.sensors.ReactionForceSensorFront.get_last_entry().magnitude[1] - force_back = fem_problem.sensors.ReactionForceSensorBack.get_last_entry().magnitude[1] - - # checking opposing forces front-back - assert force_front == pytest.approx(-1 * force_back) - # checking equal forces left-front - assert force_left == pytest.approx(force_front) - # checking report metadata - assert fem_problem.sensors.ReactionForceSensorFront.report_metadata()["surface"] == { - "function": "boundary_front", - "args": {}, - } - assert fem_problem.sensors.ReactionForceSensorBack.report_metadata()["surface"] == { - "function": "boundary_back", - "args": {}, - } - - -@pytest.mark.parametrize("dim", [2, 3]) -@pytest.mark.parametrize("degree", [1, 2]) -def test_full_boundary_stress(dim: int, degree: int) -> None: - setup_parameters = SimpleCube.default_parameters() - setup_parameters["dim"] = dim * ureg("") - setup_parameters["degree"] = degree * ureg("") - setup_parameters["strain_state"] = "multiaxial" * ureg("") - cube = SimpleCube(setup_parameters) - default_setup, fem_parameters = LinearElasticity.default_parameters() - fem_parameters["nu"] = 0.2 * ureg("") - fem_problem = LinearElasticity(cube, fem_parameters) - - # define stress sensor - if dim == 2: - sensor_location = [0.5, 0.5, 0.0] - elif dim == 3: - sensor_location = [0.5, 0.5, 0.5] - stress_sensor = StressSensor(sensor_location) - fem_problem.add_sensor(stress_sensor) - - fem_problem.experiment.apply_displ_load(0.002 * ureg("m")) - fem_problem.solve() - - # check homogeneous stress state - stress = fem_problem.sensors.StressSensor.get_last_entry().magnitude - if dim == 2: - assert stress[0] == pytest.approx(stress[3]) - if dim == 3: - assert stress[0] == pytest.approx(stress[4]) - assert stress[0] == pytest.approx(stress[8]) diff --git a/tests/sensor_definition/test_sensor_schema.py b/tests/sensor_definition/test_sensor_schema.py deleted file mode 100755 index 89b9bb2..0000000 --- a/tests/sensor_definition/test_sensor_schema.py +++ /dev/null @@ -1,39 +0,0 @@ -import importlib -import inspect -import os -from abc import ABC - -from fenicsxconcrete.sensor_definition.base_sensor import BaseSensor -from fenicsxconcrete.sensor_definition.sensor_schema import generate_sensor_schema - - -def import_classes_from_module(module_name): - module = importlib.import_module(module_name) - class_names = set() - - module_path = os.path.dirname(module.__file__) - for file_name in os.listdir(module_path): - if file_name.endswith(".py") and file_name != "__init__.py": - file_path = os.path.join(module_path, file_name) - module_name = f"{module_name}.{os.path.splitext(file_name)[0]}" - spec = importlib.util.spec_from_file_location(module_name, file_path) - module = importlib.util.module_from_spec(spec) - spec.loader.exec_module(module) - - for name, obj in inspect.getmembers(module): - if inspect.isclass(obj) and obj is not ABC and issubclass(obj, BaseSensor): - class_names.add(obj.__name__) - - return class_names - - -def test_classes_in_dictionary(): - module_name = "fenicsxconcrete.sensor_definition" - - # Import all classes from the module - classes = import_classes_from_module(module_name) - dict_sensors_schema = 
generate_sensor_schema() - - # Check if the classes are present in the dictionary keys - for cls in classes: - assert cls in dict_sensors_schema["definitions"] diff --git a/tests/sensor_definition/test_sensors.py b/tests/sensor_definition/test_sensors.py deleted file mode 100644 index d827c06..0000000 --- a/tests/sensor_definition/test_sensors.py +++ /dev/null @@ -1,78 +0,0 @@ -import pytest - -from fenicsxconcrete.finite_element_problem.linear_elasticity import LinearElasticity -from fenicsxconcrete.sensor_definition import ( - DisplacementSensor, - DOHSensor, - ReactionForceSensor, - StrainSensor, - StressSensor, - TemperatureSensor, - YoungsModulusSensor, -) -from fenicsxconcrete.util import ureg - - -def test_base_sensor() -> None: - """Testing basic functionality using the displacement sensor as example""" - default_setup, default_parameters = LinearElasticity.default_parameters() - fem_problem = LinearElasticity(default_setup, default_parameters) - - # define sensors - sensor_location = [0.0, 0.0, 0.0] - sensor = DisplacementSensor(sensor_location) - fem_problem.add_sensor(sensor) - - fem_problem.solve() # time = dt: dt_default == 1 - fem_problem.solve() # time = 2*dt: - u_sensor = fem_problem.sensors.DisplacementSensor - - # testing get data list - assert u_sensor.get_data_list().units == pytest.approx(u_sensor.units) - # testing get time list - assert u_sensor.get_time_list().magnitude == pytest.approx([fem_problem.p["dt"], 2 * fem_problem.p["dt"]]) - # testing get last data point - assert u_sensor.get_data_list()[-1].magnitude == pytest.approx(u_sensor.get_last_entry().magnitude) - # testing get data at time x - assert u_sensor.get_data_list()[1].magnitude == pytest.approx( - u_sensor.get_data_at_time(t=2 * fem_problem.p["dt"]).magnitude - ) - # testing value error for wrong time - with pytest.raises(ValueError): - u_sensor.get_data_at_time(t=42) - # testing set unit - m_data = u_sensor.get_last_entry() - u_sensor.set_units("mm") - mm_data = u_sensor.get_last_entry() - # check units - assert u_sensor.get_last_entry().units == ureg.millimeter - # check magnitude - assert m_data.magnitude == pytest.approx(mm_data.magnitude / 1000) - # testing metadata report - metadata = u_sensor.report_metadata() - true_metadata = { - "id": "DisplacementSensor", - "type": "DisplacementSensor", - "units": "millimeter", - "dimensionality": "[length]", - } - for key in true_metadata: - assert key in metadata and true_metadata[key] == metadata[key] - - -@pytest.mark.parametrize( - "sensor", - [ - DisplacementSensor, - ReactionForceSensor, - StressSensor, - StrainSensor, - DOHSensor, - YoungsModulusSensor, - TemperatureSensor, - ], -) -def test_base_units(sensor) -> None: - """test that the units defined in base_unit for the sensor are actually base units for this system""" - dummy_value = 1 * sensor.base_unit() - assert dummy_value.magnitude == dummy_value.to_base_units().magnitude diff --git a/tests/util/test_basic_logging.py b/tests/util/test_basic_logging.py deleted file mode 100644 index 72bd87b..0000000 --- a/tests/util/test_basic_logging.py +++ /dev/null @@ -1,81 +0,0 @@ -"""Test `set_log_levels` and document how logging might be controlled for -application codes""" - -import logging - -import dolfinx -import ffcx -import pytest -import ufl - -from fenicsxconcrete import set_log_levels -from fenicsxconcrete.experimental_setup.tensile_beam import TensileBeam -from fenicsxconcrete.finite_element_problem.linear_elasticity import LinearElasticity - - -def test_fenicsx_loggers(): - """application 
specific settings for FEniCSx""" - - # ### ufl and ffcx - ufl_logger = ufl.log.get_logger() - # it seems per default the levels are - # ufl: DEBUG (10) - # ffcx: WARNIG (30) - # but these are set to logging.WARNING per default by fenicsxconcrete - assert ufl_logger.getEffectiveLevel() == logging.WARNING - assert ffcx.logger.getEffectiveLevel() == logging.WARNING - - # ### dolfinx - initial_level = dolfinx.log.get_log_level() - assert initial_level.value == -1 # WARNING - - # dolfinx.log.set_log_level() only accepts dolfinx.log.LogLevel - with pytest.raises(TypeError): - dolfinx.log.set_log_level(-1) - with pytest.raises(TypeError): - dolfinx.log.set_log_level(logging.INFO) - with pytest.raises(TypeError): - dolfinx.log.set_log_level("INFO") - - dolfinx.log.set_log_level(dolfinx.log.LogLevel.ERROR) - current_level = dolfinx.log.get_log_level() - assert current_level.value == -2 - - # note that dolfinx.log.LogLevel has levels INFO, WARNING, ERROR and OFF - # and that the integer values do not follow the convention of the stdlib - # logging - dfx_levels = [ - (dolfinx.log.LogLevel.INFO, 0), - (dolfinx.log.LogLevel.WARNING, -1), - (dolfinx.log.LogLevel.ERROR, -2), - (dolfinx.log.LogLevel.OFF, -9), - ] - for lvl, value in dfx_levels: - dolfinx.log.set_log_level(lvl) - assert dolfinx.log.get_log_level().value == value - - -def test_set_log_levels(): - default_p = TensileBeam.default_parameters() - experiment = TensileBeam(default_p) - param = LinearElasticity.default_parameters()[1] - problem = LinearElasticity(experiment, param) - - # default level is logging.WARNING - for obj in [experiment, problem]: - assert obj.logger.getEffectiveLevel() == logging.WARNING - - # set level for each logger of package "fenicsxconcrete" - set_log_levels({"fenicsxconcrete": logging.INFO}) - for obj in [experiment, problem]: - assert obj.logger.getEffectiveLevel() == logging.INFO - - # or set log level individually - set_log_levels({"fenicsxconcrete": logging.DEBUG, problem.logger.name: logging.ERROR}) - assert experiment.logger.getEffectiveLevel() == logging.DEBUG - assert problem.logger.getEffectiveLevel() == logging.ERROR - - -if __name__ == "__main__": - test_fenicsx_loggers() - test_set_log_levels() diff --git a/tests/util/test_helpers.py b/tests/util/test_helpers.py deleted file mode 100644 index 1fb10c1..0000000 --- a/tests/util/test_helpers.py +++ /dev/null @@ -1,107 +0,0 @@ -import dolfinx as df -import numpy as np -import pytest -import ufl -from mpi4py import MPI -from pint import UnitRegistry - -from fenicsxconcrete.util import Parameters, QuadratureEvaluator, QuadratureRule, project - -ureg = UnitRegistry() - - -def test_parameters() -> None: - parameters = Parameters() - parameters["length"] = 42.0 * ureg.cm - - # Check if units are converted correctly - assert parameters["length"].units == ureg.meter - - parameters_2 = Parameters() - parameters_2["temperature"] = 2.0 * ureg.kelvin - - parameters_combined = parameters + parameters_2 - keys = parameters_combined.keys() - assert "length" in keys and "temperature" in keys - assert ( - parameters_combined["length"] == parameters["length"] - and parameters_combined["temperature"] == parameters_2["temperature"] - ) - - -def test_parameter_dic_functions() -> None: - parameters = Parameters() - # testing if adding None to dictionary works - new = parameters + None - assert new is parameters - - -def test_parameter_dic_update() -> None: - parameters = Parameters() - - # testing that update still requires a pint object - p_wo_pint = {"length": 0.006} - with 
pytest.raises(AssertionError): - parameters.update(p_wo_pint) - - # testing that conversion to base units works with update - length = 6000 - p_with_pint = {"length": length * ureg("mm")} - parameters.update(p_with_pint) - - assert parameters["length"].magnitude == length / 1000 - - -# @pytest.mark.parametrize("dim", [2, 3]) -def test_project() -> None: - mesh = df.mesh.create_unit_cube(MPI.COMM_SELF, 2, 2, 2) - P1 = df.fem.FunctionSpace(mesh, ("P", 1)) - u = df.fem.Function(P1) - v = df.fem.Function(P1) - u.interpolate(lambda x: x[0] + x[1] + x[2]) - project(u, P1, ufl.dx, v) - assert np.linalg.norm(u.vector.array - v.vector.array) / np.linalg.norm(u.vector.array) < 1e-4 - - -def test_quadrature_rule() -> None: - """check if all spaces and arrays are compatible with each other""" - rule = QuadratureRule() - mesh = df.mesh.create_unit_square(MPI.COMM_SELF, 2, 2) - - lagrange_space = df.fem.VectorFunctionSpace(mesh, ("Lagrange", 2)) - v = df.fem.Function(lagrange_space) - - v.interpolate(lambda x: (42.0 * x[0], 16.0 * x[1])) - - strain_form = ufl.sym(ufl.grad(v)) - strain_evaluator = QuadratureEvaluator(strain_form, mesh, rule) - - q_space = rule.create_quadrature_space(mesh) - q_function = df.fem.Function(q_space) - q_array = rule.create_quadrature_array(mesh, 1) - - assert q_function.vector.array.shape == q_array.shape - - q_vector_space = rule.create_quadrature_vector_space(mesh, 6) - q_vector_function = df.fem.Function(q_vector_space) - q_vector_array = rule.create_quadrature_array(mesh, 6) - - assert q_vector_function.vector.array.shape == q_vector_array.shape - - q_tensor_space = rule.create_quadrature_tensor_space(mesh, (2, 2)) - q_tensor_function = df.fem.Function(q_tensor_space) - q_tensor_array = rule.create_quadrature_array(mesh, (2, 2)) - - assert q_tensor_function.vector.array.shape == q_tensor_array.shape - - assert 6 * q_function.vector.array.size == q_vector_function.vector.array.size - assert 4 * q_function.vector.array.size == q_tensor_function.vector.array.size - - # check if project and QuadratureEvaluator give the same result - project(strain_form, q_tensor_space, rule.dx, q_tensor_function) - - assert ( - np.linalg.norm(q_tensor_function.vector.array - strain_evaluator.evaluate().flatten()) - / np.linalg.norm(q_tensor_function.vector.array) - < 1e-12 - ) diff --git a/tests/util/test_homogenization.py b/tests/util/test_homogenization.py deleted file mode 100644 index 2f1ad33..0000000 --- a/tests/util/test_homogenization.py +++ /dev/null @@ -1,167 +0,0 @@ -import pytest - -from fenicsxconcrete.util import ConcreteHomogenization - - -def test_thermal_homogenization(): - # input values - # matrix - matrix_E = 10 # MPa - matrix_poissions_ratio = 0.2 - matrix_compressive_strength = 10 - matrix_thermal_conductivity = 15 - - # aggregates - aggregate_E = 30 - aggregate_poissions_ratio = 0.25 - aggregate_radius = 2 # mm - aggregate_vol_frac = 0.3 # volume fraction - aggregate_thermal_conductivity = 100 - - # itz assumptions - itz_thickness = 0.2 # mm - itz_factor = 0.8 # percentage of stiffness of matrix - - homgenized_concrete_1 = ConcreteHomogenization( - E_matrix=matrix_E, - nu_matrix=matrix_poissions_ratio, - fc_matrix=matrix_compressive_strength, - kappa_matrix=matrix_thermal_conductivity, - ) - - homgenized_concrete_2 = ConcreteHomogenization( - E_matrix=matrix_E, - nu_matrix=matrix_poissions_ratio, - fc_matrix=matrix_compressive_strength, - kappa_matrix=matrix_thermal_conductivity, - ) - # adding uncoated - homgenized_concrete_1.add_uncoated_particle( - E=aggregate_E, - 
nu=aggregate_poissions_ratio, - volume_fraction=aggregate_vol_frac, - kappa=aggregate_thermal_conductivity, - ) - # adding coated - homgenized_concrete_2.add_coated_particle( - E_inclusion=aggregate_E, - nu_inclusion=aggregate_poissions_ratio, - itz_ratio=itz_factor, - radius=aggregate_radius, - coat_thickness=itz_thickness, - volume_fraction=aggregate_vol_frac, - kappa=aggregate_thermal_conductivity, - ) - - assert homgenized_concrete_1.kappa_eff == pytest.approx(homgenized_concrete_2.kappa_eff) - assert homgenized_concrete_1.kappa_eff == pytest.approx(25.980861244019145) - - -def test_stiffness_homogenization(): - # input values - # matrix - matrix_E = 10 # MPa - matrix_poissions_ratio = 0.2 - matrix_compressive_strength = 10 - - # air - air_E = 10 # MPa - air_vol_frac = 0.2 - - # aggregates - aggregate_E = 30 - aggregate_poissions_ratio = 0.25 - aggregate_radius = 2 # mm - aggregate_vol_frac = 0.3 # volume fraction - - # itz assumptions - itz_thickness = 0.2 # mm - itz_factor = 0.8 # percentage of stiffness of matrix - - # testing new code - homgenized_concrete = ConcreteHomogenization( - E_matrix=matrix_E, nu_matrix=matrix_poissions_ratio, fc_matrix=matrix_compressive_strength - ) - # adding airpores - homgenized_concrete.add_uncoated_particle(E=air_E, nu=matrix_poissions_ratio, volume_fraction=air_vol_frac) - # adding agregates - homgenized_concrete.add_coated_particle( - E_inclusion=aggregate_E, - nu_inclusion=aggregate_poissions_ratio, - itz_ratio=itz_factor, - radius=aggregate_radius, - coat_thickness=itz_thickness, - volume_fraction=aggregate_vol_frac, - ) - - assert homgenized_concrete.E_eff == pytest.approx(13.156471830404511) - assert homgenized_concrete.nu_eff == pytest.approx(0.21110139111362222) - assert homgenized_concrete.fc_eff == pytest.approx(11.317889983420725) - - -def test_volume_averages(): - # input values - # matrix - E = 42 # MPa - poissions_ratio = 0.3 - compressive_strength = 10 - matrix_C = 10 - matrix_rho = 30 - - aggregate_vol_frac = 0.5 # volume fraction - aggregate_radius = 10 - aggregate_C = 30 - aggregate_rho = 10 - - # itz assumptions - itz_thickness = 0.2 # mm - itz_factor = 0.8 # percentage of stiffness of matrix - - homgenized_concrete_1 = ConcreteHomogenization( - E_matrix=E, nu_matrix=poissions_ratio, fc_matrix=compressive_strength, rho_matrix=matrix_rho, C_matrix=matrix_C - ) - - homgenized_concrete_2 = ConcreteHomogenization( - E_matrix=E, nu_matrix=poissions_ratio, fc_matrix=compressive_strength, rho_matrix=matrix_rho, C_matrix=matrix_C - ) - # adding uncoated - homgenized_concrete_1.add_uncoated_particle( - E=E, nu=poissions_ratio, volume_fraction=aggregate_vol_frac, rho=aggregate_rho, C=aggregate_C - ) - # adding coated - homgenized_concrete_2.add_coated_particle( - E_inclusion=E, - nu_inclusion=poissions_ratio, - itz_ratio=itz_factor, - radius=aggregate_radius, - coat_thickness=itz_thickness, - volume_fraction=aggregate_vol_frac, - rho=aggregate_rho, - C=aggregate_C, - ) - - assert homgenized_concrete_1.C_vol_eff == pytest.approx(homgenized_concrete_2.C_vol_eff) - assert homgenized_concrete_1.rho_eff == pytest.approx(homgenized_concrete_2.rho_eff) - assert homgenized_concrete_1.rho_eff == pytest.approx(20) - assert homgenized_concrete_1.C_vol_eff == pytest.approx(300) - - -def test_heat_release(): - # input values - E = 10 # MPa - poissions_ratio = 0.2 - compressive_strength = 10 - aggregate_vol_frac = 0.5 # volume fraction - rho = 42 - Q = 7 - - homgenized_concrete = ConcreteHomogenization( - E_matrix=E, nu_matrix=poissions_ratio, 
fc_matrix=compressive_strength, rho_matrix=rho, Q_matrix=Q - ) - # testing computation of heat release with respect to volume - assert homgenized_concrete.Q_vol_eff == pytest.approx(Q * rho) - - # adding aggregates (no heat release assigned to them) - homgenized_concrete.add_uncoated_particle(E=E, nu=poissions_ratio, volume_fraction=aggregate_vol_frac, rho=rho) - - assert homgenized_concrete.Q_vol_eff == pytest.approx(Q * rho * aggregate_vol_frac)